
HUE-3331 [editor] Explain API functionality

Jenny Kim, 9 years ago
commit eec343d

+ 3 - 3
apps/beeswax/src/beeswax/api.py

@@ -279,7 +279,7 @@ def close_operation(request, query_history_id):
 
 
 @error_handler
-def explain_directly(request, query, design, query_server):
+def explain_directly(request, query_server, query):
   explanation = dbms.get(request.user, query_server).explain(query)
 
   response = {
@@ -325,7 +325,7 @@ def execute(request, design_id=None):
 
             try:
               if explain:
-                return explain_directly(request, query, design, query_server)
+                return explain_directly(request, query_server, query)
               else:
                 return execute_directly(request, query, design, query_server, parameters=parameters)
 
@@ -341,7 +341,7 @@ def execute(request, design_id=None):
       # Non-parameterized query
       query = HQLdesign(query_form, query_type=query_type)
       if request.GET.get('explain', 'false').lower() == 'true':
-        return explain_directly(request, query, design, query_server)
+        return explain_directly(request, query_server, query)
       else:
         return execute_directly(request, query, design, query_server)
     else:
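
The only functional change in this file is the signature of explain_directly, which now takes the query server before the query and drops the unused design argument. A minimal caller sketch under the new signature; the wrapper function is hypothetical, while dbms.get_query_server_config and hql_query are existing beeswax helpers:

# Hypothetical caller, not part of this commit.
from beeswax.api import explain_directly
from beeswax.design import hql_query
from beeswax.server import dbms

def explain_statement(request, statement):
    # Resolve the default Hive query server and wrap the raw HQL in a design object.
    query_server = dbms.get_query_server_config()
    query = hql_query(statement)
    # New argument order: query_server before query, and no design argument.
    return explain_directly(request, query_server, query)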

+ 61 - 55
apps/beeswax/src/beeswax/test_base.py

@@ -350,14 +350,18 @@ class BeeswaxSampleProvider(object):
   Setup the test db and install sample data
   """
   @classmethod
-  def setup_class(cls):
+  def setup_class(cls, load_data=True):
+    cls.load_data = load_data
+
     cls.db_name = get_db_prefix(name='hive')
     cls.cluster, shutdown = get_shared_beeswax_server(cls.db_name)
     cls.set_execution_engine()
+
     cls.client = make_logged_in_client(username='test', is_superuser=False)
     add_to_group('test', 'test')
     grant_access('test', 'test', 'beeswax')
     grant_access('test', 'test', 'metastore')
+
     # Weird redirection to avoid binding nonsense.
     cls.shutdown = [ shutdown ]
     cls.init_beeswax_db()
@@ -417,61 +421,63 @@ class BeeswaxSampleProvider(object):
     make_query(cls.client, 'CREATE DATABASE IF NOT EXISTS %(db)s' % {'db': cls.db_name}, wait=True)
     make_query(cls.client, 'CREATE DATABASE IF NOT EXISTS %(db)s_other' % {'db': cls.db_name}, wait=True)
 
-    data_file = cls.cluster.fs_prefix + u'/beeswax/sample_data_échantillon_%d.tsv'
+    if cls.load_data:
+
+      data_file = cls.cluster.fs_prefix + u'/beeswax/sample_data_échantillon_%d.tsv'
+
+      # Create a "test_partitions" table.
+      CREATE_PARTITIONED_TABLE = """
+        CREATE TABLE `%(db)s`.`test_partitions` (foo INT, bar STRING)
+        PARTITIONED BY (baz STRING, boom STRING)
+        ROW FORMAT DELIMITED
+          FIELDS TERMINATED BY '\t'
+          LINES TERMINATED BY '\n'
+      """ % {'db': cls.db_name}
+      make_query(cls.client, CREATE_PARTITIONED_TABLE, wait=True)
+      cls._make_data_file(data_file % 1)
+
+      LOAD_DATA = """
+        LOAD DATA INPATH '%(data_file)s'
+        OVERWRITE INTO TABLE `%(db)s`.`test_partitions`
+        PARTITION (baz='baz_one', boom='boom_two')
+      """ % {'db': cls.db_name, 'data_file': data_file % 1}
+      make_query(cls.client, LOAD_DATA, wait=True, local=False)
+
+      # Insert additional partition data into "test_partitions" table
+      ADD_PARTITION = """
+        ALTER TABLE `%(db)s`.`test_partitions` ADD PARTITION(baz='baz_foo', boom='boom_bar') LOCATION '%(fs_prefix)s/baz_foo/boom_bar'
+      """ % {'db': cls.db_name, 'fs_prefix': cls.cluster.fs_prefix}
+      make_query(cls.client, ADD_PARTITION, wait=True, local=False)
+
+      # Create a bunch of other tables
+      CREATE_TABLE = """
+        CREATE TABLE `%(db)s`.`%(name)s` (foo INT, bar STRING)
+        COMMENT "%(comment)s"
+        ROW FORMAT DELIMITED
+          FIELDS TERMINATED BY '\t'
+          LINES TERMINATED BY '\n'
+      """
+
+      # Create a "test" table.
+      table_info = {'db': cls.db_name, 'name': 'test', 'comment': 'Test table'}
+      cls._make_data_file(data_file % 2)
+      cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 2)
 
-    # Create a "test_partitions" table.
-    CREATE_PARTITIONED_TABLE = """
-      CREATE TABLE `%(db)s`.`test_partitions` (foo INT, bar STRING)
-      PARTITIONED BY (baz STRING, boom STRING)
-      ROW FORMAT DELIMITED
-        FIELDS TERMINATED BY '\t'
-        LINES TERMINATED BY '\n'
-    """ % {'db': cls.db_name}
-    make_query(cls.client, CREATE_PARTITIONED_TABLE, wait=True)
-    cls._make_data_file(data_file % 1)
-
-    LOAD_DATA = """
-      LOAD DATA INPATH '%(data_file)s'
-      OVERWRITE INTO TABLE `%(db)s`.`test_partitions`
-      PARTITION (baz='baz_one', boom='boom_two')
-    """ % {'db': cls.db_name, 'data_file': data_file % 1}
-    make_query(cls.client, LOAD_DATA, wait=True, local=False)
-
-    # Insert additional partition data into "test_partitions" table
-    ADD_PARTITION = """
-      ALTER TABLE `%(db)s`.`test_partitions` ADD PARTITION(baz='baz_foo', boom='boom_bar') LOCATION '%(fs_prefix)s/baz_foo/boom_bar'
-    """ % {'db': cls.db_name, 'fs_prefix': cls.cluster.fs_prefix}
-    make_query(cls.client, ADD_PARTITION, wait=True, local=False)
-
-    # Create a bunch of other tables
-    CREATE_TABLE = """
-      CREATE TABLE `%(db)s`.`%(name)s` (foo INT, bar STRING)
-      COMMENT "%(comment)s"
-      ROW FORMAT DELIMITED
-        FIELDS TERMINATED BY '\t'
-        LINES TERMINATED BY '\n'
-    """
-
-    # Create a "test" table.
-    table_info = {'db': cls.db_name, 'name': 'test', 'comment': 'Test table'}
-    cls._make_data_file(data_file % 2)
-    cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 2)
-
-    if is_live_cluster():
-      LOG.warn('HUE-2884: We cannot create Hive UTF8 tables when live cluster testing at the moment')
-    else:
-      # Create a "test_utf8" table.
-      table_info = {'db': cls.db_name, 'name': 'test_utf8', 'comment': cls.get_i18n_table_comment()}
-      cls._make_i18n_data_file(data_file % 3, 'utf-8')
-      cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 3)
-
-      # Create a "test_latin1" table.
-      table_info = {'db': cls.db_name, 'name': 'test_latin1', 'comment': cls.get_i18n_table_comment()}
-      cls._make_i18n_data_file(data_file % 4, 'latin1')
-      cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 4)
-
-    # Create a "myview" view.
-    make_query(cls.client, "CREATE VIEW `%(db)s`.`myview` (foo, bar) as SELECT * FROM `%(db)s`.`test`" % {'db': cls.db_name}, wait=True)
+      if is_live_cluster():
+        LOG.warn('HUE-2884: We cannot create Hive UTF8 tables when live cluster testing at the moment')
+      else:
+        # Create a "test_utf8" table.
+        table_info = {'db': cls.db_name, 'name': 'test_utf8', 'comment': cls.get_i18n_table_comment()}
+        cls._make_i18n_data_file(data_file % 3, 'utf-8')
+        cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 3)
+
+        # Create a "test_latin1" table.
+        table_info = {'db': cls.db_name, 'name': 'test_latin1', 'comment': cls.get_i18n_table_comment()}
+        cls._make_i18n_data_file(data_file % 4, 'latin1')
+        cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 4)
+
+      # Create a "myview" view.
+      make_query(cls.client, "CREATE VIEW `%(db)s`.`myview` (foo, bar) as SELECT * FROM `%(db)s`.`test`" % {'db': cls.db_name}, wait=True)
 
     _INITIALIZED = True
 

+ 14 - 0
desktop/libs/notebook/src/notebook/api.py

@@ -393,6 +393,20 @@ def get_sample_data(request, server=None, database=None, table=None):
   return JsonResponse(response)
 
 
+@require_POST
+@check_document_access_permission()
+@api_error_handler
+def explain(request):
+  response = {'status': -1}
+
+  notebook = json.loads(request.POST.get('notebook', '{}'))
+  snippet = json.loads(request.POST.get('snippet', '{}'))
+
+  response = get_api(request, snippet).explain(notebook, snippet)
+
+  return JsonResponse(response)
+
+
 @require_GET
 @api_error_handler
 def github_fetch(request):
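
The new explain view deserializes the notebook and snippet POST fields (both JSON-encoded strings) and delegates to the connector's explain() method, returning its result as JSON. A minimal sketch of that request/response contract using Django's test client; the helper below is hypothetical and mirrors what the integration test further down does in full:

# Hypothetical helper, not part of this commit.
import json
from django.core.urlresolvers import reverse  # Django <= 1.9 import path, as in the tests below

def explain_via_http(client, notebook_dict, snippet_dict):
    # The view expects both fields as JSON-encoded strings in the POST body.
    response = client.post(reverse('notebook:explain'), {
        'notebook': json.dumps(notebook_dict),
        'snippet': json.dumps(snippet_dict),
    })
    data = json.loads(response.content)
    # status is 0 on success; api_error_handler reports failures in the same envelope.
    return data.get('explanation') if data.get('status') == 0 else None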

+ 17 - 1
desktop/libs/notebook/src/notebook/connectors/hiveserver2.py

@@ -32,7 +32,7 @@ LOG = logging.getLogger(__name__)
 
 try:
   from beeswax import data_export
-  from beeswax.api import _autocomplete, _get_sample_data
+  from beeswax.api import _autocomplete, _get_sample_data, explain_directly
   from beeswax.data_export import upload
   from beeswax.design import hql_query, strip_trailing_semicolon, split_statements
   from beeswax import conf as beeswax_conf
@@ -128,6 +128,7 @@ class HS2Api(Api):
 
     return response
 
+
   def _get_statements(self, hql_query):
     hql_query = strip_trailing_semicolon(hql_query)
     return [strip_trailing_semicolon(statement.strip()) for statement in split_statements(hql_query)]
@@ -265,6 +266,21 @@ class HS2Api(Api):
     return _get_sample_data(db, database, table)
 
 
+  @query_error_handler
+  def explain(self, notebook, snippet):
+    db = self._get_db(snippet)
+    response = self._get_current_statement(db, snippet)
+    query = self._prepare_hql_query(snippet, response.pop('statement'))
+
+    explanation = db.explain(query)
+
+    return {
+      'status': 0,
+      'explanation': explanation.textual,
+      'statement': query.get_query_statement(0),
+    }
+
+
   def _get_current_statement(self, db, snippet):
     # Multiquery, if not first statement or arrived to the last query
     statement_id = snippet['result']['handle'].get('statement_id', 0)
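
HS2Api.explain() resolves the snippet's current statement, wraps it with _prepare_hql_query and asks HiveServer2 for the plan, returning the textual explanation along with the statement it explained. A sketch of calling the connector directly, outside the HTTP layer; the notebook and snippet dictionaries are trimmed-down assumptions modeled on the test payload below:

# Hypothetical direct use of the connector, not part of this commit.
from django.contrib.auth.models import User
from notebook.connectors.hiveserver2 import HS2Api

user = User.objects.get(username='test')
api = HS2Api(user)

notebook = {'sessions': [{'type': 'hive', 'properties': [], 'id': None}]}
snippet = {
    'type': 'hive',
    'database': 'default',
    'statement': 'SELECT * FROM sample_07 LIMIT 10',
    'properties': {'settings': [], 'files': [], 'functions': []},
    'result': {'handle': {}},
}

plan = api.explain(notebook, snippet)
# Keys per the method above: 'status', 'explanation', 'statement'.
print(plan['explanation'])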

+ 63 - 2
desktop/libs/notebook/src/notebook/connectors/tests/tests_hiveserver2.py

@@ -16,18 +16,24 @@
 # limitations under the License.
 
 import json
+import logging
 import re
 
 from nose.tools import assert_equal, assert_true, assert_false
 
 from django.contrib.auth.models import User
+from django.core.urlresolvers import reverse
 
 from desktop.lib.django_test_util import make_logged_in_client
 from desktop.lib.test_utils import add_to_group, grant_access
+from desktop.models import Document2
 from notebook.connectors.hiveserver2 import HS2Api
 
 from beeswax.server import dbms
-from beeswax.test_base import get_query_server_config
+from beeswax.test_base import BeeswaxSampleProvider, get_query_server_config
+
+
+LOG = logging.getLogger(__name__)
 
 
 class TestHiveserver2Api(object):
@@ -35,11 +41,11 @@ class TestHiveserver2Api(object):
   def setUp(self):
     self.client = make_logged_in_client(username="test", groupname="test", recreate=False, is_superuser=False)
     self.user = User.objects.get(username='test')
+
     add_to_group('test')
     grant_access("test", "test", "notebook")
 
     self.db = dbms.get(self.user, get_query_server_config())
-    #self.cluster.fs.do_as_user('test', self.cluster.fs.create_home_dir, '/user/test')
     self.api = HS2Api(self.user)
 
 
@@ -96,3 +102,58 @@ class TestHiveserver2Api(object):
     pattern = re.compile("ADD JAR hdfs://[A-Za-z0-9.:_-]+/user/test/myudfs.jar")
     assert_true(pattern.search(config_statements), config_statements)
     assert_true("CREATE TEMPORARY FUNCTION myUpper AS 'org.hue.udf.MyUpper'" in config_statements, config_statements)
+
+
+class TestHiveserver2ApiWithHadoop(BeeswaxSampleProvider):
+
+  @classmethod
+  def setup_class(cls):
+    super(TestHiveserver2ApiWithHadoop, cls).setup_class(load_data=False)
+
+  def setUp(self):
+    self.user = User.objects.get(username='test')
+
+    grant_access("test", "test", "notebook")
+
+    self.db = dbms.get(self.user, get_query_server_config())
+    self.cluster.fs.do_as_user('test', self.cluster.fs.create_home_dir, '/user/test')
+    self.api = HS2Api(self.user)
+
+
+  def test_explain(self):
+    notebook_json = """
+      {
+        "uuid": "f5d6394d-364f-56e8-6dd3-b1c5a4738c52",
+        "id": 1234,
+        "sessions": [{"type": "hive", "properties": [], "id": null}]
+      }
+    """
+    statement = 'SELECT description, salary FROM sample_07 WHERE (sample_07.salary > 100000) ORDER BY salary DESC LIMIT 1000'
+    snippet_json = """
+      {
+          "status": "running",
+          "database": "default",
+          "id": "d70d31ee-a62a-4854-b2b1-b852f6a390f5",
+          "result": {
+              "type": "table",
+              "handle": {},
+              "id": "ca11fcb1-11a5-f534-8200-050c8e1e57e3"
+          },
+          "statement": "%(statement)s",
+          "type": "hive",
+          "properties": {
+              "files": [],
+              "functions": [],
+              "settings": []
+          }
+      }
+    """ % {'statement': statement}
+
+    Document2.objects.create(id=1234, name='Test Hive Query', type='query-hive', owner=self.user, is_history=True, data=notebook_json)
+
+    response = self.client.post(reverse('notebook:explain'), {'notebook': notebook_json, 'snippet': snippet_json})
+    data = json.loads(response.content)
+
+    assert_equal(0, data['status'], data)
+    assert_true('STAGE DEPENDENCIES' in data['explanation'], data)
+    assert_equal(statement, data['statement'], data)

+ 9 - 7
desktop/libs/notebook/src/notebook/urls.py

@@ -57,6 +57,8 @@ urlpatterns += patterns('notebook.api',
   url(r'^api/close_statement/?$', 'close_statement', name='close_statement'),
   url(r'^api/get_logs/?$', 'get_logs', name='get_logs'),
 
+  url(r'^api/explain/?$', 'explain', name='explain'),
+
   url(r'^api/historify/?$', 'historify', name='historify'),
   url(r'^api/get_history/?', 'get_history', name='get_history'),
   url(r'^api/clear_history/?', 'clear_history', name='clear_history'),
@@ -68,13 +70,6 @@ urlpatterns += patterns('notebook.api',
   url(r'^api/notebook/export_result/?$', 'export_result', name='export_result'),
 )
 
-# Github
-urlpatterns += patterns('notebook.api',
-  url(r'^api/github/fetch/?$', 'github_fetch', name='github_fetch'),
-  url(r'^api/github/authorize/?$', 'github_authorize', name='github_authorize'),
-  url(r'^api/github/callback/?$', 'github_callback', name='github_callback'),
-)
-
 # Assist API
 urlpatterns += patterns('notebook.api',
   # HS2, RDBMS, JDBC
@@ -89,4 +84,11 @@ urlpatterns += patterns('notebook.api',
   url(r'^api/autocomplete/(?P<server>\w+)/(?P<database>[\w._\-0-9]+)/?$', 'autocomplete', name='api_autocomplete_tables'),
   url(r'^api/autocomplete/(?P<server>\w+)/(?P<database>[\w._\-0-9]+)/(?P<table>\w+)/?$', 'autocomplete', name='api_autocomplete_columns'),
   url(r'^api/sample/(?P<server>\w+)/(?P<database>[\w._\-0-9]+)/(?P<table>\w+)/?$', 'get_sample_data', name='api_sample_data'),
+)
+
+# Github
+urlpatterns += patterns('notebook.api',
+  url(r'^api/github/fetch/?$', 'github_fetch', name='github_fetch'),
+  url(r'^api/github/authorize/?$', 'github_authorize', name='github_authorize'),
+  url(r'^api/github/callback/?$', 'github_callback', name='github_callback'),
 )