
[jobbrowser] Remove incomplete Hive Querybrowser skeleton (#2743)

- This enables the Hue dump database command to work properly (see the sketch below).
- We have a different implementation of the Hive query browser that gets its data from the query store.
- Some of the API/UI calls and hiveserver2.py-side code for the implementation may still be left over, but that code is never called or invoked.
Harsh Gupta 3 years ago
Commit
ef8a385130
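
On the first bullet: Django's dumpdata serializes every model registered in INSTALLED_APPS from the default connection, including unmanaged ones, but the hive_query table behind the HiveQuery model removed here only existed in the external query store (note the .using('query') calls in the deleted code). A minimal sketch of the presumed failure mode and of the workaround that removing the model makes unnecessary; the invocations are hypothetical reproductions, not commands from this commit:

```python
# Minimal sketch (assumption: the dump failed as described in the commit
# message). dumpdata reads every registered model from the default database,
# but hive_query was never created there, only in the external query store.
from django.core.management import call_command

# Before this commit, the equivalent of the following raised an error such as
# OperationalError: no such table: hive_query (hypothetical reproduction):
call_command('dumpdata', output='data.json')

# The workaround, had the model stayed, would be to exclude it on every dump:
call_command('dumpdata', exclude=['jobbrowser.HiveQuery'], output='data.json')
```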

+ 6 - 108
apps/jobbrowser/src/jobbrowser/apis/hive_query_api.py

@@ -29,10 +29,7 @@ from desktop.lib.rest.http_client import HttpClient
 from desktop.lib.rest.resource import Resource
 from notebook.models import _get_notebook_api, make_notebook, MockRequest
 
-
 from jobbrowser.apis.base_api import Api
-from jobbrowser.conf import QUERY_STORE
-from jobbrowser.models import HiveQuery
 
 if sys.version_info[0] > 2:
   from django.utils.translation import gettext as _
@@ -48,76 +45,14 @@ class HiveQueryApi(Api):
   def __init__(self, user, cluster=None):
     self.user = user
     self.cluster = cluster
-    self.api = HiveQueryClient()
 
   def apps(self, filters):
-    queries = self.api.get_queries(filters)
-
-    apps = {
-      "apps": {
-        "queries": [{
-            "details": None,
-            "dags": [],
-            "id": query.id,
-            "queryId": query.query_id,
-            "startTime": query.start_time,
-            "query": query.query.replace('\r\n', ' ')[:60] + ('...' if len(query.query) > 60 else ''),
-            "highlightedQuery": None,
-            "endTime": query.end_time,
-            "elapsedTime": query.elapsed_time,
-            "status": query.status,
-            "queueName": query.queue_name,
-            "userId": query.user_id,
-            "requestUser": query.request_user,
-            "cpuTime": query.cpu_time,
-            "physicalMemory": query.physical_memory,
-            "virtualMemory": query.virtual_memory,
-            "dataRead": query.data_read,
-            "dataWritten": query.data_written,
-            "operationId": query.operation_id,
-            "clientIpAddress": query.client_ip_address,
-            "hiveInstanceAddress": query.hive_instance_address,
-            "hiveInstanceType": query.hive_instance_type,
-            "sessionId": query.session_id,
-            "logId": query.log_id,
-            "threadId": query.thread_id,
-            "executionMode": query.execution_mode,
-            "tablesRead": query.tables_read,
-            "tablesWritten": query.tables_written,
-            "databasesUsed": query.databases_used,
-            "domainId": query.domain_id,
-            "llapAppId": query.llap_app_id,
-            "usedCBO": query.used_cbo,
-            "createdAt": query.created_at
-          }
-          for query in queries
-        ],
-        "meta": {
-            "limit": filters['limit'],
-            "offset": filters['offset'],
-            "size": self.api.get_query_count(filters)
-          }
-      }
-    }
-
-    return apps
+    # Removed Hive Querybrowser skeleton implementation, check git history if needed to add it back.
+    pass
 
   def app(self, appid):
-    query = self.api.get_query(query_id=appid)
-
-    if not query:
-      raise PopupException(_('Could not find query id %s' % appid))
-
-    params = {
-      'extended': 'true',
-      'queryId': query.query_id
-    }
-
-    client = HttpClient(QUERY_STORE.SERVER_URL.get())
-    resource = Resource(client)
-    app = resource.get('api/hive/query', params=params, headers=self.HEADERS)
-
-    return app
+    # Removed Hive Querybrowser skeleton implementation, check git history if needed to add it back.
+    pass
 
   def action(self, query_ids, action):
     message = {'actions': {}, 'status': 0}
@@ -136,7 +71,7 @@ class HiveQueryApi(Api):
           action_details['status'] = -1
           action_details['message'] = _('kill action failed : %s' % str(ex))
 
-        message['actions'][query_id] = action_details;
+        message['actions'][query_id] = action_details
 
     return message
 
@@ -159,7 +94,7 @@ class HiveQueryApi(Api):
   def profile(self, appid, app_type, app_property, app_filters):
     message = {'message': '', 'status': 0}
 
-    return message;
+    return message
 
   def _api_status(self, status):
     if status == 'SUCCESS':
@@ -170,40 +105,3 @@ class HiveQueryApi(Api):
       return 'RUNNING'
     else:
       return 'PAUSED'
-
-
-class HiveQueryClient():
-
-  def _get_all_queries(self):
-    return HiveQuery.objects.using('query').order_by('-start_time')
-
-  def _get_queries(self, filters):
-    queries = self._get_all_queries()
-    queries = queries.filter(start_time__gte=filters['startTime'], end_time__lte=filters['endTime'])
-    if filters['text']:
-      queries = queries.filter(query__icontains=filters['text'])
-
-    for facet in filters['facets']:
-      queries = queries.filter(**{facet['field']+'__in': facet['values']})
-
-    return queries
-
-  def get_query_count(self, filters):
-    filtered_query_list = self._get_queries(filters)
-
-    return len(filtered_query_list)
-
-  def get_queries(self, filters):
-    filtered_query_list = self._get_queries(filters)
-    paginated_query_list = filtered_query_list[filters['offset']:filters['offset'] + filters['limit']]
-
-    return paginated_query_list
-
-  def get_query(self, query_id):
-    return HiveQuery.objects.using('query').get(query_id=query_id)
-
-  def get_query_analysis(self, query_id): pass
-
-  # EXPLAIN with row count
-  # CBO COST
-  # VECTORIZATION?
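
For reference, here is a short usage sketch of the client deleted above, reconstructed from the removed methods; the filter values are illustrative and mirror the test fixtures in the next file:

```python
# Usage sketch of the removed HiveQueryClient, reconstructed from the deleted
# code above. Filter values are illustrative only.
from jobbrowser.apis.hive_query_api import HiveQueryClient  # pre-commit import

filters = {
    'startTime': 1,    # inclusive lower bound on start_time
    'endTime': 10,     # inclusive upper bound on end_time
    'text': 'select',  # case-insensitive substring match on the query text
    'facets': [{'field': 'status', 'values': ['SUCCESS']}],
    'limit': 2,
    'offset': 0,
}

client = HiveQueryClient()
total = client.get_query_count(filters)  # count after time/text/facet filters
page = client.get_queries(filters)       # slice [offset : offset + limit]
```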

+ 0 - 130
apps/jobbrowser/src/jobbrowser/apis/hive_query_api_tests.py

@@ -30,10 +30,6 @@ from desktop.conf import QUERY_DATABASE
 from desktop.lib.django_test_util import make_logged_in_client
 from useradmin.models import User
 
-from jobbrowser.apis.hive_query_api import HiveQueryApi, HiveQueryClient
-from jobbrowser.models import HiveQuery
-
-
 if sys.version_info[0] > 2:
   from unittest.mock import patch, Mock
 else:
@@ -75,129 +71,3 @@ class TestHiveQueryApiNotebook():
       )
 
       assert_equal(0, response_data['status'])
-
-
-class TestHiveQueryApi():
-
-  def setUp(self):
-    if not QUERY_DATABASE.HOST.get():
-      raise SkipTest
-
-    self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
-    self.user = rewrite_user(User.objects.get(username="test"))
-    self.filters = {
-      "endTime": 10,
-      "facets": [{"field": "status", "values": ["SUCCESS"]}],
-      "limit": 2,
-      "offset": 0,
-      "sortText": "",
-      "startTime": 1,
-      "text": "select"
-    }
-
-    with connection.schema_editor() as schema_editor:
-      schema_editor.create_model(HiveQuery)
-
-      if HiveQuery._meta.db_table not in connection.introspection.table_names():
-        raise ValueError("Table `{table_name}` is missing in test database.".format(table_name=HiveQuery._meta.db_table))
-
-  def tearDown(self):
-    with connection.schema_editor() as schema_editor:
-      schema_editor.delete_model(HiveQuery)
-
-
-  def test_search_pagination(self):
-    with patch('jobbrowser.apis.hive_query_api.HiveQueryClient._get_all_queries') as _get_all_queries:
-
-      HiveQuery.objects.create(query_id='1', start_time=6, end_time=8, query="select * from employee1", status="SUCCESS")
-      HiveQuery.objects.create(query_id='2', start_time=4, end_time=8, query="select * from employee2", status="ERROR")
-      HiveQuery.objects.create(query_id='3', start_time=7, end_time=9, query="select * from employee3)", status="SUCCESS")
-      HiveQuery.objects.create(query_id='4', start_time=2, end_time=9, query="select * from employee4", status="SUCCESS")
-      HiveQuery.objects.create(query_id='5', start_time=8, end_time=9, query="create table xyz2()", status="SUCCESS")
-      HiveQuery.objects.create(query_id='6', start_time=1, end_time=12, query="create table xyz3()", status="SUCCESS")
-
-      _get_all_queries.return_value = HiveQuery.objects.order_by('-start_time')
-
-      _data = json.dumps(self.filters)
-      response = self.client.post("/jobbrowser/api/jobs/queries-hive", content_type='application/json', data=_data)
-      data = json.loads(response.content)
-
-      assert_equal(2, len(data['queries']))  # pagination
-      assert_equal('3', data['queries'][0]['queryId'])  # query id (with order_by)
-      assert_equal("SUCCESS", data['queries'][0]['status'])  # facet selection
-      assert_true("select" in data['queries'][0]['query'])  # search text
-      assert_equal(3, data['meta']['size'])  # total filtered queries count
-      assert_equal(2, data['meta']['limit'])  # limit value of filter
-
-
-  # TODO
-  # def test_app(self):
-  #   with patch('jobbrowser.apis.hive_query_api.HiveQueryClient.get_query') as get_query:
-  #     query_id = 'd94d2fb4815a05c4:b1ccec1500000000'
-
-  #     get_query.return_value = Mock(
-  #       query_id=query_id,
-  #       query='SELECT'
-  #     )
-
-  #     query = HiveQueryApi(self.user).app(query_id)
-
-  #     assert_equal(query_id, query['id'])
-
-
-class TestHiveQueryClient():
-
-  def setUp(self):
-    if not QUERY_DATABASE.HOST.get():
-      raise SkipTest
-
-    self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
-    self.user = rewrite_user(User.objects.get(username="test"))
-    self.filters = {
-      'endTime': 1602146114116,
-      'facets': [],
-      'limit': 2,
-      'offset': 0,
-      'sortText': "startTime:DESC",
-      'startTime': 1601541314116,
-      'text': "select"
-    }
-    self.query1 = HiveQuery()
-    self.query2 = HiveQuery()
-    self.query3 = HiveQuery()
-
-    with connection.schema_editor() as schema_editor:
-      schema_editor.create_model(HiveQuery)
-
-      if HiveQuery._meta.db_table not in connection.introspection.table_names():
-        raise ValueError("Table `{table_name}` is missing in test database.".format(table_name=HiveQuery._meta.db_table))
-
-  def tearDown(self):
-    with connection.schema_editor() as schema_editor:
-      schema_editor.delete_model(HiveQuery)
-
-
-  def test__get_all_queries(self):
-    HiveQueryClient()._get_all_queries()
-
-
-  def test_get_query_count(self):
-    with patch('jobbrowser.apis.hive_query_api.HiveQueryClient._get_queries') as _get_queries:
-
-      _get_queries.return_value = [self.query1, self.query2, self.query3]
-      filtered_query_count = HiveQueryClient().get_query_count(self.filters)
-
-      assert_equal(3, filtered_query_count)
-
-
-  def test_get_queries(self):
-    HiveQueryClient().get_queries(self.filters)
-
-
-  def test_get_query(self):
-    with patch('jobbrowser.apis.hive_query_api.HiveQueryClient.get_query') as get_query:
-      query_id = 'd94d2fb4815a05c4:b1ccec1500000000'
-      self.query1.query_id = query_id
-      get_query.return_value = self.query1
-
-      assert_equal(HiveQueryClient().get_query(query_id).query_id, query_id)

+ 0 - 105
apps/jobbrowser/src/jobbrowser/models.py

@@ -80,111 +80,6 @@ def can_kill_job(self, user):
   return user.username == self.user
 
 
-# You'll have to do the following manually to clean this up:
-#   * Rearrange models' order
-#   * Make sure each model has one field with primary_key=True
-#   * Make sure each ForeignKey has `on_delete` set to the desired behavior.
-#   * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
-# Feel free to rename the models, but don't rename db_table values or field names.
-
-class HiveQuery(models.Model):
-  # (mysql.E001) MySQL does not allow unique CharFields to have a max_length > 255.
-  # query_id = models.CharField(unique=True, max_length=512, blank=True, null=True)
-  id = models.IntegerField(unique=True, blank=True, null=False, primary_key=True)
-  query_id = models.CharField(unique=True, max_length=255, blank=True, null=True)
-  query = models.TextField(blank=True, null=True)
-  query_fts = models.TextField(blank=True, null=True)  # This field type is a guess.
-  start_time = models.BigIntegerField(blank=True, null=True)
-  end_time = models.BigIntegerField(blank=True, null=True)
-  elapsed_time = models.BigIntegerField(blank=True, null=True)
-  status = models.CharField(max_length=32, blank=True, null=True)
-  queue_name = models.CharField(max_length=767, blank=True, null=True)
-  user_id = models.CharField(max_length=256, blank=True, null=True)
-  request_user = models.CharField(max_length=256, blank=True, null=True)
-  cpu_time = models.BigIntegerField(blank=True, null=True)
-  physical_memory = models.BigIntegerField(blank=True, null=True)
-  virtual_memory = models.BigIntegerField(blank=True, null=True)
-  data_read = models.BigIntegerField(blank=True, null=True)
-  data_written = models.BigIntegerField(blank=True, null=True)
-  operation_id = models.CharField(max_length=512, blank=True, null=True)
-  client_ip_address = models.CharField(max_length=64, blank=True, null=True)
-  hive_instance_address = models.CharField(max_length=512, blank=True, null=True)
-  hive_instance_type = models.CharField(max_length=512, blank=True, null=True)
-  session_id = models.CharField(max_length=512, blank=True, null=True)
-  log_id = models.CharField(max_length=512, blank=True, null=True)
-  thread_id = models.CharField(max_length=512, blank=True, null=True)
-  execution_mode = models.CharField(max_length=16, blank=True, null=True)
-  databases_used = models.TextField(blank=True, null=True)  # This field type is a guess.
-  tables_read = models.TextField(blank=True, null=True)  # This field type is a guess.
-  tables_written = models.TextField(blank=True, null=True)  # This field type is a guess.
-  domain_id = models.CharField(max_length=512, blank=True, null=True)
-  llap_app_id = models.CharField(max_length=512, blank=True, null=True)
-  used_cbo = models.CharField(max_length=16, blank=True, null=True)
-  first_task_started_time = models.BigIntegerField(blank=True, null=True)
-  waiting_time = models.BigIntegerField(blank=True, null=True)
-  resource_utilization = models.BigIntegerField(blank=True, null=True)
-  version = models.SmallIntegerField(blank=True, null=True)
-  created_at = models.DateTimeField(blank=True, null=True)
-
-  class Meta:
-    managed = False
-    db_table = 'hive_query'
-
-
-class QueryDetails(models.Model):
-  hive_query = models.ForeignKey(HiveQuery, on_delete=models.CASCADE, unique=True, blank=True, null=True)
-  explain_plan_raw = models.TextField(blank=True, null=True)  # This field type is a guess.
-  configuration_raw = models.TextField(blank=True, null=True)  # This field type is a guess.
-  perf = models.TextField(blank=True, null=True)  # This field type is a guess.
-  configuration_compressed = models.BinaryField(blank=True, null=True)
-  explain_plan_compressed = models.BinaryField(blank=True, null=True)
-
-  class Meta:
-    managed = False
-    db_table = 'query_details'
-
-
-class DagInfo(models.Model):
-  # (mysql.E001) MySQL does not allow unique CharFields to have a max_length > 255.
-  # dag_id = models.CharField(unique=True, max_length=512, blank=True, null=True)
-  dag_id = models.CharField(unique=True, max_length=255, blank=True, null=True)
-  dag_name = models.CharField(max_length=512, blank=True, null=True)
-  application_id = models.CharField(max_length=512, blank=True, null=True)
-  init_time = models.BigIntegerField(blank=True, null=True)
-  start_time = models.BigIntegerField(blank=True, null=True)
-  end_time = models.BigIntegerField(blank=True, null=True)
-  time_taken = models.BigIntegerField(blank=True, null=True)
-  status = models.CharField(max_length=64, blank=True, null=True)
-  am_webservice_ver = models.CharField(max_length=16, blank=True, null=True)
-  am_log_url = models.CharField(max_length=512, blank=True, null=True)
-  queue_name = models.CharField(max_length=64, blank=True, null=True)
-  caller_id = models.CharField(max_length=512, blank=True, null=True)
-  caller_type = models.CharField(max_length=128, blank=True, null=True)
-  hive_query = models.ForeignKey('HiveQuery', on_delete=models.CASCADE, blank=True, null=True)
-  created_at = models.DateTimeField(blank=True, null=True)
-  source_file = models.TextField(blank=True, null=True)
-
-  class Meta:
-    managed = False
-    db_table = 'dag_info'
-
-
-class DagDetails(models.Model):
-  dag_info = models.ForeignKey('DagInfo', on_delete=models.CASCADE, unique=True, blank=True, null=True)
-  hive_query = models.ForeignKey('HiveQuery', on_delete=models.CASCADE, blank=True, null=True)
-  dag_plan_raw = models.TextField(blank=True, null=True)  # This field type is a guess.
-  vertex_name_id_mapping_raw = models.TextField(blank=True, null=True)  # This field type is a guess.
-  diagnostics = models.TextField(blank=True, null=True)
-  counters_raw = models.TextField(blank=True, null=True)  # This field type is a guess.
-  dag_plan_compressed = models.BinaryField(blank=True, null=True)
-  vertex_name_id_mapping_compressed = models.BinaryField(blank=True, null=True)
-  counters_compressed = models.BinaryField(blank=True, null=True)
-
-  class Meta:
-    managed = False
-    db_table = 'dag_details'
-
-
 class LinkJobLogs(object):
 
   @classmethod
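
A closing note on the removed models: the header comment that opened the deleted block ("You'll have to do the following manually to clean this up...") is the boilerplate Django's inspectdb command emits, so these unmanaged models were presumably generated straight from the query-store schema. A one-line sketch of how such models are typically produced; the 'query' database alias is an assumption based on the .using('query') calls earlier in this commit:

```python
# Hypothetical reconstruction: generating inspectdb-style unmanaged models
# (like the removed HiveQuery/QueryDetails/DagInfo/DagDetails) from an
# existing schema. Prints model classes for every table in the given alias.
from django.core.management import call_command

call_command('inspectdb', database='query')
```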