
HUE-7580 [metadata] Split DDL and stats uploads in two

Query and DDL uploads stay on by default.
Stats uploads (table and columns) are off by default, as they are pretty slow.
Romain Rigaux · 8 years ago · commit d1ca684
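
The split boils down to two new booleans on the Optimizer config section. A minimal sketch of the gating they drive in upload_table_stats() below (names taken from the diffs in this commit):

    from metadata.conf import OPTIMIZER

    def effective_upload_flags(with_ddl, with_table_stats, with_columns_stats):
        # DDL uploads stay on unless the admin disables auto_upload_ddl.
        if not OPTIMIZER.AUTO_UPLOAD_DDL.get():
            with_ddl = False
        # Table and column stats are gated together by auto_upload_stats,
        # which defaults to False because collecting them is slow.
        if not OPTIMIZER.AUTO_UPLOAD_STATS.get():
            with_table_stats = with_columns_stats = False
        return with_ddl, with_table_stats, with_columns_stats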

+ 6 - 0
desktop/conf.dist/hue.ini

@@ -1735,6 +1735,12 @@
     # Automatically upload queries after their execution in order to improve recommendations.
     ## auto_upload_queries=true
 
+    # Automatically upload queried tables DDL in order to improve recommendations.
+    ## auto_upload_ddl=true
+
+    # Automatically upload queried tables and columns stats in order to improve recommendations.
+    ## auto_upload_stats=false
+
     # Allow admins to upload the last N executed queries in the quick start wizard. Use 0 to disable.
     ## query_history_upload_limit=10000
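
An admin who wants the slow stats uploads back only has to uncomment and flip the new key. A sketch of the relevant hue.ini fragment (the enclosing [metadata]/[[optimizer]] section names are assumed from Hue's standard layout; the hunk above shows only the keys):

    [metadata]
      [[optimizer]]
        auto_upload_ddl=true
        auto_upload_stats=true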
 

+ 6 - 0
desktop/conf/pseudo-distributed.ini.tmpl

@@ -1739,6 +1739,12 @@
     # Automatically upload queries after their execution in order to improve recommendations.
     ## auto_upload_queries=true
 
+    # Automatically upload queried tables DDL in order to improve recommendations.
+    ## auto_upload_ddl=true
+
+    # Automatically upload queried tables and columns stats in order to improve recommendations.
+    ## auto_upload_stats=false
+
     # Allow admins to upload the last N executed queries in the quick start wizard. Use 0 to disable.
     ## query_history_upload_limit=10000
 

+ 11 - 5
desktop/core/src/desktop/templates/assist.mako

@@ -17,17 +17,19 @@
 <%!
 from django.utils.translation import ugettext as _
 
+
+from indexer.conf import ENABLE_NEW_INDEXER
+from metadata.conf import has_navigator, OPTIMIZER
+from metastore.conf import ENABLE_NEW_CREATE_TABLE
+from notebook.conf import ENABLE_QUERY_BUILDER, ENABLE_QUERY_SCHEDULING, get_ordered_interpreters
+
 from dashboard.conf import HAS_SQL_ENABLED
+
 from desktop import appmanager
 from desktop import conf
 from desktop.conf import USE_NEW_SIDE_PANELS, VCS
 from desktop.lib.i18n import smart_unicode
 from desktop.views import _ko
-
-from indexer.conf import ENABLE_NEW_INDEXER
-from metadata.conf import has_navigator
-from metastore.conf import ENABLE_NEW_CREATE_TABLE
-from notebook.conf import ENABLE_QUERY_BUILDER, ENABLE_QUERY_SCHEDULING, get_ordered_interpreters
 %>
 
 <%def name="assistJSModels()">
@@ -2283,7 +2285,11 @@ from notebook.conf import ENABLE_QUERY_BUILDER, ENABLE_QUERY_SCHEDULING, get_ord
         });
 
         self.isMissingStats = ko.pureComputed(function () {
+          % if OPTIMIZER.AUTO_UPLOAD_STATS.get():
           return self.activeRisks().noStats && self.activeRisks().noStats.length > 0;
+          % else:
+          return false;
+          % endif
         });
 
         var createQualifiedIdentifier = function (identifierChain, defaultDatabase) {

+ 12 - 0
desktop/libs/metadata/src/metadata/conf.py

@@ -136,6 +136,18 @@ OPTIMIZER = ConfigSection(
       default=True,
       type=coerce_bool
     ),
+    AUTO_UPLOAD_DDL = Config(
+      key="auto_upload_ddl",
+      help=_t("Automatically upload queried tables DDL in order to improve recommendations."),
+      default=True,
+      type=coerce_bool
+    ),
+    AUTO_UPLOAD_STATS = Config(
+      key="auto_upload_stats",
+      help=_t("Automatically upload queried tables and columns stats in order to improve recommendations."),
+      default=False,
+      type=coerce_bool
+    ),
     QUERY_HISTORY_UPLOAD_LIMIT = Config(
       key="query_history_upload_limit",
       help=_t("Allow admins to upload the last N executed queries in the quick start wizard. Use 0 to disable."),

+ 41 - 23
desktop/libs/metadata/src/metadata/optimizer_api.py

@@ -388,12 +388,20 @@ def upload_table_stats(request):
 
   db_tables = json.loads(request.POST.get('db_tables'), '[]')
   source_platform = json.loads(request.POST.get('sourcePlatform', '"hive"'))
-  with_columns = json.loads(request.POST.get('with_columns', 'false'))
   with_ddl = json.loads(request.POST.get('with_ddl', 'false'))
+  with_table_stats = json.loads(request.POST.get('with_table', 'false'))
+  with_columns_stats = json.loads(request.POST.get('with_columns', 'false'))
 
+  table_ddls = []
   table_stats = []
   column_stats = []
-  table_ddls = []
+
+  if not OPTIMIZER.AUTO_UPLOAD_DDL.get():
+    with_ddl = False
+
+  if not OPTIMIZER.AUTO_UPLOAD_STATS.get():
+    with_table_stats = with_columns_stats = False
+
 
   for db_table in db_tables:
     path = _get_table_name(db_table)
@@ -409,24 +417,25 @@ def upload_table_stats(request):
           db.close(handle)
           table_ddls.append((0, 0, ' '.join([row[0] for row in result.rows()]), path['database']))
 
-      mock_request = MockRequest(user=request.user, source_platform=source_platform)
-      full_table_stats = json.loads(get_table_stats(mock_request, database=path['database'], table=path['table']).content)
-      stats = dict((stat['data_type'], stat['comment']) for stat in full_table_stats['stats'])
-
-      table_stats.append({
-        'table_name': '%(database)s.%(table)s' % path, # DB Prefix
-        'num_rows':  stats.get('numRows', -1),
-        'last_modified_time':  stats.get('transient_lastDdlTime', -1),
-        'total_size':  stats.get('totalSize', -1),
-        'raw_data_size':  stats.get('rawDataSize', -1),
-        'num_files':  stats.get('numFiles', -1),
-        'num_partitions':  stats.get('numPartitions', -1),
-        # bytes_cached
-        # cache_replication
-        # format
-      })
-
-      if with_columns:
+      if with_table_stats:
+        mock_request = MockRequest(user=request.user, source_platform=source_platform)
+        full_table_stats = json.loads(get_table_stats(mock_request, database=path['database'], table=path['table']).content)
+        stats = dict((stat['data_type'], stat['comment']) for stat in full_table_stats['stats'])
+
+        table_stats.append({
+          'table_name': '%(database)s.%(table)s' % path, # DB Prefix
+          'num_rows':  stats.get('numRows', -1),
+          'last_modified_time':  stats.get('transient_lastDdlTime', -1),
+          'total_size':  stats.get('totalSize', -1),
+          'raw_data_size':  stats.get('rawDataSize', -1),
+          'num_files':  stats.get('numFiles', -1),
+          'num_partitions':  stats.get('numPartitions', -1),
+          # bytes_cached
+          # cache_replication
+          # format
+        })
+
+      if with_columns_stats:
         if source_platform == 'impala':
           colum_stats = json.loads(get_table_stats(mock_request, database=path['database'], table=path['table'], column=-1).content)['stats']
         else:
@@ -456,13 +465,22 @@ def upload_table_stats(request):
 
   api = OptimizerApi(request.user)
 
-  response['upload_table_stats'] = api.upload(data=table_stats, data_type='table_stats', source_platform=source_platform)
-  response['status'] = 0 if response['upload_table_stats']['status']['state'] in ('WAITING', 'FINISHED', 'IN_PROGRESS') else -1
+  response['status'] = 0
+
+  if table_stats:
+    response['upload_table_stats'] = api.upload(data=table_stats, data_type='table_stats', source_platform=source_platform)
+    response['upload_table_stats_status'] = 0 if response['upload_table_stats']['status']['state'] in ('WAITING', 'FINISHED', 'IN_PROGRESS') else -1
+    response['status'] = response['upload_table_stats_status']
   if column_stats:
     response['upload_cols_stats'] = api.upload(data=column_stats, data_type='cols_stats', source_platform=source_platform)
-    response['status'] = response['status'] if response['upload_cols_stats']['status']['state'] in ('WAITING', 'FINISHED', 'IN_PROGRESS') else -1
+    response['upload_cols_stats_status'] = response['status'] if response['upload_cols_stats']['status']['state'] in ('WAITING', 'FINISHED', 'IN_PROGRESS') else -1
+    if response['upload_cols_stats_status'] != 0:
+      response['status'] = response['upload_cols_stats_status']
   if table_ddls:
     response['upload_table_ddl'] = api.upload(data=table_ddls, data_type='queries', source_platform=source_platform)
+    response['upload_table_ddl_status'] = response['status'] if response['upload_table_ddl']['status']['state'] in ('WAITING', 'FINISHED', 'IN_PROGRESS') else -1
+    if response['upload_table_ddl_status'] != 0:
+      response['status'] = response['upload_table_ddl_status']
 
   return JsonResponse(response)
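
The response handling changes shape too: instead of always uploading table stats and keying the overall status off that single call, each optional upload now reports its own *_status and folds any failure into the top-level status. A simplified sketch of the roll-up (assuming, as in the hunk above, that OptimizerApi.upload() returns a {'status': {'state': ...}} payload):

    OK_STATES = ('WAITING', 'FINISHED', 'IN_PROGRESS')

    def roll_up(response, key, upload_result):
        # Record the individual upload result and its own status flag.
        response[key] = upload_result
        response[key + '_status'] = 0 if upload_result['status']['state'] in OK_STATES else -1
        # Any failed upload flips the overall status to -1.
        if response[key + '_status'] != 0:
            response['status'] = response[key + '_status']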
 

+ 4 - 3
desktop/libs/notebook/src/notebook/static/notebook/js/notebook.ko.js

@@ -1898,14 +1898,15 @@ var EditorViewModel = (function() {
           return table.databaseName + '.' + table.tableName;
         })),
         sourcePlatform: ko.mapping.toJSON(self.type()),
-        with_columns: ko.mapping.toJSON(true),
-        with_ddl: ko.mapping.toJSON(true)
+        with_ddl: ko.mapping.toJSON(true),
+        with_table_stats: ko.mapping.toJSON(true),
+        with_columns_stats: ko.mapping.toJSON(true)
       }, function(data) {
         if (data.status == 0) {
           if (options.showProgress) {
             $(document).trigger("info", $.map(options.activeTables, function(table) { return table.tableName; }) + " stats sent to analyse");
           }
-          if (data.upload_table_ddl) {
+          if (data.upload_table_ddl && options.showProgress) { // With showProgress only currently as can be very slow
             self.watchUploadStatus(data.upload_table_ddl.status.workloadId, options.showProgress);
           }
         } else {

+ 11 - 5
desktop/libs/notebook/src/notebook/templates/editor_components.mako

@@ -21,7 +21,7 @@ from desktop import conf
 from desktop.lib.i18n import smart_unicode
 from desktop.views import _ko, antixss
 
-from metadata.conf import has_optimizer
+from metadata.conf import has_optimizer, OPTIMIZER
 from notebook.conf import ENABLE_QUERY_BUILDER, ENABLE_QUERY_SCHEDULING, ENABLE_BATCH_EXECUTE, ENABLE_EXTERNAL_STATEMENT, ENABLE_PRESENTATION
 %>
 
@@ -3160,17 +3160,23 @@ ${ sqlSyntaxDropdown.sqlSyntaxDropdown() }
       }
 
       if (viewModel.isOptimizerEnabled()) {
+        % if OPTIMIZER.AUTO_UPLOAD_QUERIES.get():
+        huePubSub.subscribe("editor.upload.query", function (query_id) {
+          viewModel.selectedNotebook().snippets()[0].uploadQuery(query_id);
+        }, HUE_PUB_SUB_EDITOR_ID);
+        % endif
+
+        % if OPTIMIZER.AUTO_UPLOAD_DDL.get():
         huePubSub.subscribe('editor.upload.table.stats', function (options) {
           viewModel.selectedNotebook().snippets()[0].uploadTableStats(options);
         }, HUE_PUB_SUB_EDITOR_ID);
+        % endif
 
+        % if OPTIMIZER.QUERY_HISTORY_UPLOAD_LIMIT.get() != 0:
         huePubSub.subscribe("editor.upload.history", function () {
           viewModel.selectedNotebook().snippets()[0].uploadQueryHistory(5);
         }, HUE_PUB_SUB_EDITOR_ID);
-
-        huePubSub.subscribe("editor.upload.query", function (query_id) {
-          viewModel.selectedNotebook().snippets()[0].uploadQuery(query_id);
-        }, HUE_PUB_SUB_EDITOR_ID);
+        % endif
       }
 
       viewModel.selectedNotebook.subscribe(function (newVal) {
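
Read together, the three guards give each pub/sub topic its own config switch. A summary sketch of the mapping (not code from the commit; note that editor.upload.table.stats is guarded by AUTO_UPLOAD_DDL here, while the stats portion of that upload is gated server-side by AUTO_UPLOAD_STATS in optimizer_api.py):

    GATES = {
        'editor.upload.query': 'OPTIMIZER.AUTO_UPLOAD_QUERIES',
        'editor.upload.table.stats': 'OPTIMIZER.AUTO_UPLOAD_DDL',
        'editor.upload.history': 'OPTIMIZER.QUERY_HISTORY_UPLOAD_LIMIT != 0',
    }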