
HUE-4886 [impala] Get resultset row count

Jenny Kim, 9 years ago
commit 7b5ac49

+ 75 - 0
apps/impala/src/impala/server.py

@@ -15,7 +15,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import json
 import logging
+import threading
+
+from desktop.lib.rest.http_client import HttpClient
+from desktop.lib.rest.resource import Resource
 
 from beeswax.server.dbms import QueryServerException
 from beeswax.server.hive_server2_lib import HiveServerClient, HiveServerDataTable
@@ -25,11 +30,31 @@ from ImpalaService import ImpalaHiveServer2Service
 
 LOG = logging.getLogger(__name__)
 
+API_CACHE = None
+API_CACHE_LOCK = threading.Lock()
+
+
+def get_api(user, url):
+  global API_CACHE
+  if API_CACHE is None:
+    API_CACHE_LOCK.acquire()
+    try:
+      if API_CACHE is None:
+        API_CACHE = ImpalaDaemonApi(url)
+    finally:
+      API_CACHE_LOCK.release()
+  API_CACHE.set_user(user)
+  return API_CACHE
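
Aside: get_api uses double-checked locking so only one thread ever constructs the shared ImpalaDaemonApi, while set_user keeps the requesting user per thread. A minimal standalone sketch of the same pattern, with generic names that are not part of this commit:

import threading

_INSTANCE = None
_INSTANCE_LOCK = threading.Lock()

def get_singleton(factory):
  # Cheap unsynchronized read first; re-check under the lock so the
  # factory runs at most once. Rebinding a module-level name is atomic
  # under CPython's GIL, so the unlocked read is safe.
  global _INSTANCE
  if _INSTANCE is None:
    with _INSTANCE_LOCK:
      if _INSTANCE is None:
        _INSTANCE = factory()
  return _INSTANCE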
+
 
 class ImpalaServerClientException(Exception):
   pass
 
 
+class ImpalaDaemonApiException(Exception):
+  pass
+
+
 class ImpalaServerClient(HiveServerClient):
 
   def get_exec_summary(self, operation_handle, session_handle):
@@ -100,3 +125,53 @@ class ImpalaServerClient(HiveServerClient):
       return summary_dict
     except Exception, e:
       raise ImpalaServerClientException('Failed to serialize the TExecSummary object: %s' % str(e))
+
+
+class ImpalaDaemonApi(object):
+
+  def __init__(self, server_url):
+    self._url = server_url
+    self._client = HttpClient(self._url, logger=LOG)
+    self._root = Resource(self._client)
+    self._security_enabled = False
+    self._thread_local = threading.local()
+
+
+  def __str__(self):
+    return "ImpalaDaemonApi at %s" % self._url
+
+
+  @property
+  def url(self):
+    return self._url
+
+
+  @property
+  def security_enabled(self):
+    return self._security_enabled
+
+
+  @property
+  def user(self):
+    return self._thread_local.user
+
+
+  def set_user(self, user):
+    if hasattr(user, 'username'):
+      self._thread_local.user = user.username
+    else:
+      self._thread_local.user = user
+
+
+  def get_query_profile(self, query_id):
+    params = {
+      'query_id': query_id,
+      'json': 'true'
+    }
+    profile = None
+    resp = self._root.get('query_profile', params=params)
+    try:
+      profile = json.loads(resp)
+    except ValueError, e:
+      raise ImpalaDaemonApiException('ImpalaDaemonApi query_profile did not return valid JSON.')
+    return profile
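
For reference, get_query_profile above amounts to a single HTTP GET against the impalad debug web server. A sketch of the equivalent request using requests in place of desktop.lib.rest (the host, and the default debug port 25000, are assumptions; in the connector the URL is derived from the session's http_addr):

import json
import requests

def fetch_query_profile(base_url, query_id):
  # json=true asks the /query_profile debug page for JSON instead of HTML,
  # matching the params dict built in ImpalaDaemonApi.get_query_profile.
  resp = requests.get('%s/query_profile' % base_url,
                      params={'query_id': query_id, 'json': 'true'})
  return json.loads(resp.text)

# Hypothetical usage:
# profile = fetch_query_profile('http://impalad-host:25000', '3a4f1e:9b2c10')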

+ 100 - 36
desktop/libs/notebook/src/notebook/connectors/hiveserver2.py

@@ -15,10 +15,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import base64
 import copy
 import logging
 import re
 import StringIO
+import struct
 import time
 
 from django.core.urlresolvers import reverse
@@ -30,6 +32,7 @@ from desktop.lib.conf import BoundConfig
 from desktop.lib.exceptions import StructuredException
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.i18n import force_unicode
+from desktop.lib.rest.http_client import RestException
 from desktop.models import DefaultConfiguration
 
 from notebook.connectors.base import Api, QueryError, QueryExpired, OperationTimeout, OperationNotSupported
@@ -39,12 +42,12 @@ LOG = logging.getLogger(__name__)
 
 
 try:
-  from beeswax import data_export
+  from beeswax import conf as beeswax_conf, data_export
   from beeswax.api import _autocomplete, _get_sample_data
   from beeswax.conf import CONFIG_WHITELIST as hive_settings, DOWNLOAD_CELL_LIMIT
   from beeswax.data_export import upload
   from beeswax.design import hql_query, strip_trailing_semicolon, split_statements
-  from beeswax import conf as beeswax_conf
+  from beeswax.hive_site import hiveserver2_use_ssl
   from beeswax.models import QUERY_TYPES, HiveServerQueryHandle, HiveServerQueryHistory, QueryHistory, Session
   from beeswax.server import dbms
   from beeswax.server.dbms import get_query_server_config, QueryServerException
@@ -56,6 +59,7 @@ except ImportError, e:
 try:
   from impala import api   # Force checking if Impala is enabled
   from impala.conf import CONFIG_WHITELIST as impala_settings
+  from impala.server import get_api as get_impalad_api, ImpalaDaemonApiException
 except ImportError, e:
   LOG.warn("Impala app is not enabled")
   impala_settings = None
@@ -183,8 +187,7 @@ class HS2Api(Api):
     response['properties'] = properties
 
     if lang == 'impala':
-      impala_settings = session.get_formatted_properties()
-      http_addr = next((setting['value'] for setting in impala_settings if setting['key'].lower() == 'http_addr'), None)
+      http_addr = self._get_impala_server_url(session)
       response['http_addr'] = http_addr
 
     return response
@@ -294,43 +297,16 @@ class HS2Api(Api):
       'message': ''
     }
 
-    total_records_match = None
-    total_size_match = None
-
     if snippet.get('status') != 'available':
       raise QueryError(_('Result status is not available'))
 
-    if snippet['type'] != 'hive':
+    if snippet['type'] not in ('hive', 'impala'):
       raise OperationNotSupported(_('Cannot fetch result metadata for snippet type: %s') % snippet['type'])
 
-    engine = self._get_hive_execution_engine(notebook, snippet).lower()
-    logs = self.get_log(notebook, snippet, startFrom=0)
-
-    if engine == 'mr':
-      jobs = self.get_jobs(notebook, snippet, logs)
-      if jobs:
-        last_job_id = jobs[-1].get('name')
-        LOG.info("Hive query executed %d jobs, last job is: %s" % (len(jobs), last_job_id))
-
-        # Attempt to fetch last task's syslog and parse the total records
-        task_syslog = self._get_syslog(last_job_id)
-        if task_syslog:
-          total_records_re = "org.apache.hadoop.hive.ql.exec.FileSinkOperator: RECORDS_OUT_0:(?P<total_records>\d+)"
-          total_records_match = re.search(total_records_re, task_syslog, re.MULTILINE)
-        else:
-          raise QueryError(_('Failed to get task syslog for Hive query with job: %s')  % last_job_id)
-      else:
-        resp['message'] = _('Hive query did not execute any jobs.')
-    elif engine == 'spark':
-      total_records_re = "RECORDS_OUT_0: (?P<total_records>\d+)"
-      total_size_re = "Spark Job\[[a-z0-9-]+\] Metrics[A-Za-z0-9:\s]+ResultSize: (?P<total_size>\d+)"
-      total_records_match = re.search(total_records_re, logs, re.MULTILINE)
-      total_size_match = re.search(total_size_re, logs, re.MULTILINE)
-
-    if total_records_match:
-      resp['rows'] = int(total_records_match.group('total_records'))
-    if total_size_match:
-      resp['size'] = int(total_size_match.group('total_size'))
+    if snippet['type'] == 'hive':
+      resp['rows'], resp['size'], resp['message'] = self._get_hive_result_size(notebook, snippet)
+    else:  # Impala
+      resp['rows'], resp['size'], resp['message'] = self._get_impala_result_size(notebook, snippet)
 
     return resp
 
@@ -684,3 +660,91 @@ class HS2Api(Api):
       attempts += 1
 
     return syslog
+
+
+  def _get_hive_result_size(self, notebook, snippet):
+    total_records_match, total_size_match = None, None
+    total_records, total_size, msg = None, None, None
+    engine = self._get_hive_execution_engine(notebook, snippet).lower()
+    logs = self.get_log(notebook, snippet, startFrom=0)
+
+    if engine == 'mr':
+      jobs = self.get_jobs(notebook, snippet, logs)
+      if jobs:
+        last_job_id = jobs[-1].get('name')
+        LOG.info("Hive query executed %d jobs, last job is: %s" % (len(jobs), last_job_id))
+
+        # Attempt to fetch last task's syslog and parse the total records
+        task_syslog = self._get_syslog(last_job_id)
+        if task_syslog:
+          total_records_re = "org.apache.hadoop.hive.ql.exec.FileSinkOperator: RECORDS_OUT_0:(?P<total_records>\d+)"
+          total_records_match = re.search(total_records_re, task_syslog, re.MULTILINE)
+        else:
+          raise QueryError(_('Failed to get task syslog for Hive query with job: %s') % last_job_id)
+      else:
+        msg = _('Hive query did not execute any jobs.')
+    elif engine == 'spark':
+      total_records_re = "RECORDS_OUT_0: (?P<total_records>\d+)"
+      total_size_re = "Spark Job\[[a-z0-9-]+\] Metrics[A-Za-z0-9:\s]+ResultSize: (?P<total_size>\d+)"
+      total_records_match = re.search(total_records_re, logs, re.MULTILINE)
+      total_size_match = re.search(total_size_re, logs, re.MULTILINE)
+
+    if total_records_match:
+      total_records = int(total_records_match.group('total_records'))
+    if total_size_match:
+      total_size = int(total_size_match.group('total_size'))
+
+    return total_records, total_size, msg
+
+
+  def _get_impala_result_size(self, notebook, snippet):
+    total_records_match = None
+    total_records, total_size, msg = None, None, None
+
+    query_id = self._get_impala_query_id(snippet)
+    session = Session.objects.get_session(self.user, application='impala')
+    protocol = 'https' if hiveserver2_use_ssl() else 'http'
+    server_url = '%s://%s' % (protocol, self._get_impala_server_url(session))
+    if query_id:
+      LOG.info("Attempting to get Impala query profile at server_url %s for query ID: %s" % (server_url, query_id))
+      fragment = self._get_impala_query_profile(server_url, query_id=query_id)
+      total_records_re = "Coordinator Fragment F\d\d.+?RowsReturned: (?P<total_records>\d+).*?Averaged Fragment F\d\d"
+      total_records_match = re.search(total_records_re, fragment, re.MULTILINE | re.DOTALL)
+    if total_records_match:
+      total_records = int(total_records_match.group('total_records'))
+
+    return total_records, total_size, msg
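
The coordinator-fragment regex can be sanity-checked against a fabricated profile excerpt (the text below is made up for illustration; real profiles come from the impalad /query_profile page):

import re

fragment = (
  "Coordinator Fragment F00\n"
  "  RowsReturned: 23\n"
  "Averaged Fragment F00\n"
)
total_records_re = r"Coordinator Fragment F\d\d.+?RowsReturned: (?P<total_records>\d+).*?Averaged Fragment F\d\d"
match = re.search(total_records_re, fragment, re.MULTILINE | re.DOTALL)
print(int(match.group('total_records')))  # -> 23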
+
+
+  def _get_impala_query_id(self, snippet):
+    guid = None
+    if 'result' in snippet and 'handle' in snippet['result'] and 'guid' in snippet['result']['handle']:
+      try:
+        decoded_guid = base64.decodestring(snippet['result']['handle']['guid'])
+        guid = "%x:%x" % struct.unpack(b"QQ", decoded_guid)
+      except Exception, e:
+        LOG.warn('Failed to decode operation handle guid: %s' % e)
+    else:
+      LOG.warn('Snippet does not contain a valid result handle, cannot extract Impala query ID.')
+    return guid
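
The guid decoding can be exercised end to end with a fabricated 16-byte handle; base64.decodestring is the Python 2 spelling of what base64.b64decode does here:

import base64
import struct

encoded_guid = base64.b64encode(struct.pack('QQ', 0x1234, 0xabcd))  # fabricated handle guid
decoded_guid = base64.b64decode(encoded_guid)
# Two native-endian unsigned 64-bit ints, formatted as Impala's hex id pair.
query_id = '%x:%x' % struct.unpack('QQ', decoded_guid)
print(query_id)  # -> '1234:abcd'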
+
+
+  def _get_impala_server_url(self, session):
+    impala_settings = session.get_formatted_properties()
+    http_addr = next((setting['value'] for setting in impala_settings if setting['key'].lower() == 'http_addr'), None)
+    return http_addr
+
+
+  def _get_impala_query_profile(self, server_url, query_id):
+    api = get_impalad_api(user=self.user, url=server_url)
+
+    try:
+      query_profile = api.get_query_profile(query_id)
+      profile = query_profile.get('profile')
+    except (RestException, ImpalaDaemonApiException), e:
+      raise PopupException(_("Failed to get query profile from Impala Daemon server: %s") % e)
+
+    if not profile:
+      raise PopupException(_("Could not find profile in query profile response from Impala Daemon Server."))
+
+    return profile

+ 25 - 3
desktop/libs/notebook/src/notebook/connectors/tests/tests_hiveserver2.py

@@ -659,7 +659,7 @@ class TestHiveserver2ApiWithHadoop(BeeswaxSampleProvider):
     assert_equal(0, data['status'], data)
     assert_true('result' in data)
     assert_true('rows' in data['result'])
-    assert_true(data['result']['rows'] > 0)
+    assert_equal(823, data['result']['rows'])
 
     # Assert that a query with multiple jobs returns rows
     statement = "SELECT app, COUNT(1) AS count FROM web_logs GROUP BY app ORDER BY count DESC;"
@@ -674,7 +674,7 @@ class TestHiveserver2ApiWithHadoop(BeeswaxSampleProvider):
     assert_equal(0, data['status'], data)
     assert_true('result' in data)
     assert_true('rows' in data['result'])
-    assert_true(data['result']['rows'] > 0)
+    assert_equal(23, data['result']['rows'])
 
 
   def test_fetch_result_size_spark(self):
@@ -721,5 +721,27 @@ class TestHiveserver2ApiWithHadoop(BeeswaxSampleProvider):
     assert_true('result' in data)
     assert_true('rows' in data['result'])
     assert_true('size' in data['result'])
-    assert_true(data['result']['rows'] > 0)
+    assert_equal(23, data['result']['rows'])
     assert_true(data['result']['size'] > 0)
+
+
+  def test_fetch_result_size_impala(self):
+    if not is_live_cluster():
+      raise SkipTest
+
+    # Assert that a query that runs a job will return rows
+    statement = "SELECT app, COUNT(1) AS count FROM web_logs GROUP BY app ORDER BY count DESC;"
+    doc = self.create_query_document(owner=self.user, query_type='impala', statement=statement)
+    notebook = Notebook(document=doc)
+    snippet = self.execute_and_wait(doc, snippet_idx=0, timeout=60.0, wait=2.0)
+
+    response = self.client.post(reverse('notebook:fetch_result_size'),
+                              {'notebook': notebook.get_json(), 'snippet': json.dumps(snippet)})
+
+    data = json.loads(response.content)
+    assert_equal(0, data['status'], data)
+    assert_true('result' in data)
+    assert_true('rows' in data['result'])
+    assert_true('size' in data['result'])
+    assert_equal(23, data['result']['rows'])
+    assert_equal(None, data['result']['size'])