
HUE-7877 [dashboard] Automatically pull SQL statement of query history

Romain Rigaux 7 years ago
parent commit 8a83aa7

+ 6 - 3
apps/beeswax/src/beeswax/api.py

@@ -34,6 +34,7 @@ from desktop.lib.i18n import force_unicode
 from desktop.lib.parameterization import substitute_variables
 from metastore import parser
 from notebook.models import escape_rows
+from indexer.file_format import HiveFormat
 
 import beeswax.models
 
@@ -108,15 +109,17 @@ def _autocomplete(db, database=None, table=None, column=None, nested=None, query
       tables_meta = db.get_tables_meta(database=database)
       response['tables_meta'] = tables_meta
     elif column is None:
-      print 'aaaaaaaaaaaaaaaaaaaaaaaaaa ', query
       if query is not None:
         class SubQueryTable():
           def __init__(self, db, query):
             self.query = query
             # Table Properties            
             self.name = 'Test'
-            # TODO Replace 't.', type different too?
-            self.cols =  db.get_query_metadata(query).data_table.cols()
+            cols = db.get_query_metadata(query).data_table.cols()
+            for col in cols:
+              col.name = re.sub('^t\.', '', col.name)
+              col.type = HiveFormat.FIELD_TYPE_TRANSLATE.get(col.type, 'string')
+            self.cols =  cols
             self.hdfs_link = None
             self.comment = None
             self.is_impala_only = False
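
For reference, a self-contained sketch of the normalization the new lines above perform: columns of the aliased sub-query come back named 't.<column>' with HiveServer2 Thrift type names, so the 't.' prefix is stripped and unknown types fall back to 'string'. The column names and the 'VARCHAR_TYPE' sample below are purely illustrative.

import re

FIELD_TYPE_TRANSLATE = {          # subset of HiveFormat.FIELD_TYPE_TRANSLATE
    'BOOLEAN_TYPE': 'boolean',
    'TINYINT_TYPE': 'long',
}

raw_cols = [('t.is_active', 'BOOLEAN_TYPE'), ('t.code', 'VARCHAR_TYPE')]
cleaned = [
    (re.sub(r'^t\.', '', name), FIELD_TYPE_TRANSLATE.get(col_type, 'string'))
    for name, col_type in raw_cols
]
# cleaned == [('is_active', 'boolean'), ('code', 'string')]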

+ 5 - 5
desktop/core/src/desktop/templates/common_notebook_ko_components.mako

@@ -196,13 +196,13 @@ except ImportError, e:
         </li>
         % if ENABLE_SQL_INDEXER.get():
         <li>
-          <a class="download" href="javascript:void(0)" data-bind="click: function() { saveTarget('search-index'); savePath('__hue__'); trySaveResults(); }" title="${ _('Explore result in a dashboard') }">
+          <a class="download" href="javascript:void(0)" data-bind="click: function() { saveTarget('search-index'); savePath('__hue__'); trySaveResults(); }" title="${ _('Visually explore the result') }">
             <!-- ko template: { name: 'app-icon-template', data: { icon: 'dashboard' } } --><!-- /ko --> ${ _('Dashboard') }
           </a>
         </li>
         % endif
         <li>
-          <a class="download" href="javascript:void(0)" data-bind="click: function() { savePath(''); $('#saveResultsModal').modal('show'); }" title="${ _('Export the result into a collection, a new table...') }">
+          <a class="download" href="javascript:void(0)" data-bind="click: function() { savePath(''); $('#saveResultsModal').modal('show'); }" title="${ _('Export the result into a file, an index, a new table...') }">
             <i class="fa fa-fw fa-cloud-upload"></i> ${ _('Export') }
           </a>
         </li>
@@ -275,12 +275,12 @@ except ImportError, e:
                   <input data-bind="checked: saveTarget" type="radio" name="save-results-type" value="search-index">
                   &nbsp;${ _('Collection') }
                 </label>
+                <div class="inline-block" data-bind="tooltip: { title: '${ _ko("Index the data to make Dashboard explorations faster") }', placement: 'top' }">
+                  <i class="fa fa-fw fa-question-circle muted"></i>
+                </div>
                 <div data-bind="visible: saveTarget() == 'search-index'" class="inline">
                   <input data-bind="value: savePath, valueUpdate:'afterkeydown'" type="text" name="target_index" class="input-xlarge margin-left-10" placeholder="${_('Collection name')}">
                 </div>
-                <div class="inline-block" data-bind="visible: saveTarget() == 'search-index', tooltip: { title: '${ _ko("Index the data to make exploration faster") }', placement: 'top' }" style="padding: 8px">
-                  <i class="fa fa-fw fa-question-circle muted"></i>
-                </div>
               </div>
             </div>
             % endif

+ 2 - 1
desktop/libs/dashboard/src/dashboard/api.py

@@ -103,8 +103,9 @@ def index_fields_dynamic(request):
   try:
     name = request.POST['name']
     engine = request.POST['engine']
+    source = request.POST.get('source')
 
-    dynamic_fields = get_engine(request.user, engine).luke(name)
+    dynamic_fields = get_engine(request.user, engine, source=source).luke(name)
 
     result['message'] = ''
     result['fields'] = [

+ 1 - 1
desktop/libs/dashboard/src/dashboard/dashboard_api.py

@@ -22,7 +22,7 @@ LOG = logging.getLogger(__name__)
 
 
 def get_engine(user, engine='solr', facet=None, source='data'):
-  if isinstance(engine, dict):    
+  if isinstance(engine, dict):
     if source == 'data':
       source = engine.get('source')
     engine = engine.get('engine', 'solr')

+ 2 - 1
desktop/libs/dashboard/src/dashboard/static/dashboard/js/search.ko.js

@@ -1369,7 +1369,8 @@ var Collection = function (vm, collection) {
   self.syncDynamicFields = function () {
     $.post("/dashboard/index/fields/dynamic", {
         name: self.name(),
-        engine: self.engine()
+        engine: self.engine(),
+        source: self.source()
       }, function (data) {
         if (data.status == 0) {
           syncArray(self.template.fieldsAttributes, data.gridlayout_header_fields, true);

+ 1 - 1
desktop/libs/dashboard/src/dashboard/views.py

@@ -157,7 +157,7 @@ def new_search(request):
 def browse(request, name, is_mobile=False):
   engine = request.GET.get('engine', 'solr')
   source = request.GET.get('source', 'data')
-  engine = 'hive'
+
   collections = get_engine(request.user, engine, source=source).datasets()
   if not collections and engine == 'solr':
     return no_collections(request)

+ 1 - 0
desktop/libs/indexer/src/indexer/file_format.py

@@ -545,6 +545,7 @@ class TextFileReader(object):
 
 
 class HiveFormat(CSVFormat):
+
   FIELD_TYPE_TRANSLATE = {
     "BOOLEAN_TYPE": "boolean",
     "TINYINT_TYPE": "long",

+ 5 - 4
desktop/libs/notebook/src/notebook/api.py

@@ -703,14 +703,15 @@ def export_result(request):
       'allowed': True
     }
   elif data_format == 'search-index':
-    ## Get query history id
+    # Open the results in a Dashboard, either via a SQL sub-query or via a Solr index built by the wizard (quick indexing or a Morphline job)
 
     if is_embedded:
       if destination == '__hue__':
         notebook_id = notebook['id'] or request.GET.get('editor', request.GET.get('notebook'))
-        response['watch_url'] = reverse('dashboard:browse', kwargs={'name': notebook_id}) + '?source=query'
-        response['status'] = 0        
-        return JsonResponse(response)
+        engine = notebook['type'].replace('query-', '')
+        response['watch_url'] = reverse('dashboard:browse', kwargs={'name': notebook_id}) + '?source=query&engine=%(engine)s' % {'engine': engine}
+        response['status'] = 0
+        return JsonResponse(response) # Currently does not live-index into Solr; uses a SQL sub-query instead
 
         destination = _get_snippet_name(notebook, unique=True, table_format=True)
         live_indexing = True
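
For reference, a small sketch of the redirect the new lines build, assuming the 'dashboard:browse' route resolves to '/dashboard/browse/<name>' (the document id 456 is made up):

notebook = {'id': 456, 'type': 'query-hive'}
engine = notebook['type'].replace('query-', '')   # 'hive'
watch_url = '/dashboard/browse/%(name)s?source=query&engine=%(engine)s' % {
    'name': notebook['id'],
    'engine': engine,
}
# watch_url == '/dashboard/browse/456?source=query&engine=hive'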

+ 26 - 34
desktop/libs/notebook/src/notebook/dashboard_api.py

@@ -27,10 +27,11 @@ from itertools import groupby
 from django.utils.html import escape
 
 from notebook.models import make_notebook
-from notebook.connectors.base import get_api, OperationTimeout
+from notebook.connectors.base import get_api, OperationTimeout, Notebook
 
 from dashboard.dashboard_api import DashboardApi
 from dashboard.models import Collection2, augment_response
+from desktop.models import Document2
 
 
 LOG = logging.getLogger(__name__)
@@ -52,8 +53,6 @@ class SQLDashboardApi(DashboardApi):
     self.async = engine == 'hive' or engine == 'impala'
 
   def query(self, dashboard, query, facet=None):
-    database, table = self._get_database_table_names(dashboard['name'])
-
     if query['qs'] == [{'q': '_root_:*'}]:
       return {'response': {'numFound': 0}}
 
@@ -64,6 +63,16 @@ class SQLDashboardApi(DashboardApi):
     if timeFilter:
       filters.append(timeFilter)
 
+    if self.source == 'query':
+      sql_from = '(%(query)s) t' % {'query': self._get_query(dashboard['name'])}
+      database, table = '', ''
+    else:
+      database, table = self._get_database_table_names(dashboard['name'])
+      sql_from = '`%(database)s`.`%(table)s`' % {
+        'database': database,
+        'table': table
+      }
+
     if facet and facet['properties']['facets']:
       for i, _facet in enumerate(facet['properties']['facets']):
         _facet['position'] = i
@@ -94,18 +103,12 @@ class SQLDashboardApi(DashboardApi):
             mincount_fields_operation.append('COUNT(*) OVER (PARTITION BY %s) AS %s' % (', '.join(mincount_fields_name), mincount_field_name) )
             mincount_where.append('%s >= %s' % (mincount_field_name, str(f['mincount'])))
           sql_from = '''(SELECT * FROM (SELECT *, %(fields)s
-          FROM %(database)s.%(table)s) default
+          FROM %(sql_from)s) default
           WHERE %(where)s) default''' % {
             'fields': ', '.join(mincount_fields_operation),
-            'database': database,
-            'table': table,
+            'sql_from': sql_from,
             'where': ' AND '.join(mincount_where)
           }
-        else:
-          sql_from = '%(database)s.%(table)s' % {
-            'database': database,
-            'table': table
-          }
 
         order_by = ', '.join([self._get_dimension_field(f)['order_by'] for f in reversed(facet['properties']['facets']) if f['sort'] != 'default'])
 
@@ -130,19 +133,13 @@ class SQLDashboardApi(DashboardApi):
           FROM
           (
             SELECT %(field)s, cume_dist() OVER (ORDER BY %(field)s) * 100 AS cume_dist__%(field)s
-            FROM %(database)s.%(table)s
+            FROM %(sql_from)s
           ) DEFAULT
           WHERE cume_dist__%(field)s >= %(value)s) DEFAULT
           ''' % {
             'field': facet['properties']['facets'][0]['field'],
             'value': facet['properties']['facets'][0]['aggregate']['percentile'] if aggregate_function == 'percentile' else 50,
-            'database': database,
-            'table': table
-          }
-        else:
-          sql_from = '%(database)s.%(table)s' % {
-            'database': database,
-            'table': table
+            'sql_from': sql_from,
           }
 
         sql = '''SELECT %(fields)s
@@ -155,21 +152,10 @@ class SQLDashboardApi(DashboardApi):
       elif facet['type'] == 'statement':
         sql = facet['properties']['statement']
     else:
-      print '==================================================================== ', self.source
-      print '===================================================================='
       fields = Collection2.get_field_list(dashboard)
-      if self.source == 'query':
-        # Open snippet and get statement
-        sql_from = '(select app from web_logs) t'
-        database, table = '', ''
-      else:
-        sql_from = '`%(database)s`.`%(table)s`' % {
-          'database': database,
-          'table': table
-        }
       sql = "SELECT %(fields)s FROM %(sql_from)s" % {
           'sql_from': sql_from,
-          'fields': ', '.join(['`%s`' % f if f != '*' else '*' for f in fields])
+          'fields': ', '.join(['`%s` as `%s`' % (f, f) if f != '*' else '*' for f in fields])
       }
       if filters:
         sql += ' ' + self._convert_filters_to_where(filters)
@@ -226,8 +212,7 @@ class SQLDashboardApi(DashboardApi):
     snippet = {'type': self.engine}
 
     if self.source == 'query':
-      # Open snippet and get statement
-      snippet['query'] = 'select app from web_logs'
+      snippet['query'] = self._get_query(name)
       database, table = '', ''
     else:
       database, table = self._get_database_table_names(name)
@@ -653,6 +638,13 @@ class SQLDashboardApi(DashboardApi):
     return database, table_name
 
 
+  def _get_query(self, name):
+    nb_doc = Document2.objects.document(user=self.user, doc_id=name)
+    notebook = Notebook(document=nb_doc).get_data()
+    snippet = notebook['snippets'][0]
+    return snippet['statement']
+
+
   def _convert_notebook_facet(self, result, facet, query):
     response = json.loads('''{
    "fieldsAttributes":[],
@@ -757,7 +749,7 @@ class SQLDashboardApi(DashboardApi):
 
 
   def _convert_notebook_results(self, result, dashboard, query):
-    cols = [col['name'] for col in result['meta']]
+    cols = [col['name'] if self.source == 'data' else re.sub('^t\.', '', col['name']) for col in result['meta']]
 
     docs = []
     for row in result['data']:
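
For reference, a standalone sketch of how the 'query' source ends up in the generated SQL: _get_query() returns the first snippet's statement, query() wraps it as an aliased sub-query, and the dashboard fields are selected from it. The 'web_logs' statement and field list below are illustrative only.

statement = 'SELECT app, bytes FROM web_logs'     # what _get_query() would return
sql_from = '(%(query)s) t' % {'query': statement}
fields = ['app', 'bytes']
sql = 'SELECT %(fields)s FROM %(sql_from)s' % {
    'sql_from': sql_from,
    'fields': ', '.join(['`%s` as `%s`' % (f, f) if f != '*' else '*' for f in fields]),
}
# sql == 'SELECT `app` as `app`, `bytes` as `bytes` FROM (SELECT app, bytes FROM web_logs) t'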