
HUE-7955 [importer] Properly load the prefilling of other sources than manual or file

Romain Rigaux 7 years ago
commit 271f5c1

+ 7 - 7
desktop/core/src/desktop/templates/common_notebook_ko_components.mako

@@ -279,7 +279,7 @@ except ImportError, e:
                   <i class="fa fa-fw fa-question-circle muted"></i>
                 </div>
                 <div data-bind="visible: saveTarget() == 'search-index'" class="inline">
-                  <input data-bind="value: savePath, valueUpdate:'afterkeydown'" type="text" name="target_index" class="input-xlarge margin-left-10" placeholder="${_('Collection name')}">
+                  <input data-bind="value: savePath, valueUpdate: 'afterkeydown'" type="text" name="target_index" class="input-xlarge margin-left-10" placeholder="${_('Collection name')}">
                 </div>
               </div>
             </div>
@@ -413,16 +413,16 @@ except ImportError, e:
             if (resp.status == 0) {
               if (IS_HUE_4) {
                 $(".modal-backdrop").remove();
-                if (self.saveTarget() == 'hdfs-file' || self.saveTarget() == 'search-index') {
+                if (self.saveTarget() == 'hdfs-file') {
                   $(self.saveResultsModalId).modal('hide');
                   huePubSub.publish('open.link', resp.watch_url);
-                } else {
-                  if (resp.history_uuid) {
+                } else if (self.saveTarget() == 'search-index') {
+                  huePubSub.publish('open.importer.query', resp);
+                } else if (resp.history_uuid) {
                     $(self.saveResultsModalId).modal('hide');
                     huePubSub.publish('notebook.task.submitted', resp.history_uuid);
-                  } else if (resp && resp.message) {
-                    $(document).trigger("error", resp.message);
-                  }
+                } else if (resp && resp.message) {
+                  $(document).trigger("error", resp.message);
                 }
               } else {
                 window.location.href = resp.watch_url;
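
Saving to a search index no longer opens resp.watch_url; the whole export response is handed to the importer instead. A minimal sketch of that branch, with names taken from the diff (huePubSub and jQuery are the usual Hue front-end globals):

  // Only the 'search-index' target changes behaviour: instead of redirecting,
  // publish the export response so the importer page can prefill itself.
  if (self.saveTarget() == 'search-index') {
    huePubSub.publish('open.importer.query', resp);
  }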

+ 19 - 0
desktop/core/src/desktop/templates/hue.mako

@@ -757,6 +757,25 @@ ${ smart_unicode(login_modal(request).content) | n,unicode }
           })
         });
 
+        huePubSub.subscribe('open.importer.query', function (data) {
+          self.loadApp('importer');
+          self.getActiveAppViewModel(function (viewModel) {
+            hueUtils.waitForVariable(viewModel.createWizard, function(){
+              hueUtils.waitForVariable(viewModel.createWizard.prefill, function(){
+                viewModel.createWizard.prefill.source_type(data['source_type']);
+                viewModel.createWizard.prefill.target_type(data['target_type']);
+                viewModel.createWizard.prefill.target_path(data['target_path']);
+              });
+              hueUtils.waitForVariable(viewModel.createWizard.source.query, function(){
+                viewModel.createWizard.source.query({"id": data.id}); // TODO load in dropdown to be cleaner
+              });
+              hueUtils.waitForVariable(viewModel.createWizard.loadSampleData, function(){
+                viewModel.createWizard.loadSampleData(data);
+              });
+            });
+          })
+        });
+
         huePubSub.subscribe('resize.form.actions', function () {
           document.styleSheets[0].addRule('.form-actions','width: ' + $('.page-content').width() + 'px');
           if ($('.content-panel:visible').length > 0) {
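
The new subscriber waits for the importer view model to exist before touching it, so the event can be published at any point after the app has loaded. A hedged usage sketch; the payload mirrors the JSON built in notebook/api.py further down, and every value here is a placeholder:

  // Reopen the importer prefilled from an existing query result.
  huePubSub.publish('open.importer.query', {
    id: 42,                         // query/notebook id, fed into createWizard.source.query()
    source_type: 'query',
    target_type: 'index',
    target_path: 'web_logs_index',  // hypothetical destination collection
    sample: [['row1col1', 'row1col2']],
    columns: [{name: 'code', type: 'string'}]
  });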

+ 43 - 38
desktop/libs/indexer/src/indexer/api3.py

@@ -152,7 +152,10 @@ def guess_field_types(request):
     snippet = notebook['snippets'][0]
     db = get_api(request, snippet)
 
-    if file_format['query'].get('id'):
+    if file_format.get('sampleCols'):
+      columns = file_format.get('sampleCols')
+      sample = file_format.get('sample')
+    else:
       snippet['query'] = snippet['statement'] #self._get_current_statement(db, snippet) # TODO multi statement
       try:
         sample = db.fetch_result(notebook, snippet, 4, start_over=True)['rows'][:4]
@@ -160,25 +163,27 @@ def guess_field_types(request):
         LOG.warn('Skipping sample data as query handle might be expired: %s' % e)
         sample = [[], [], [], [], []]
       columns = db.autocomplete(snippet=snippet, database='', table='')
-      format_ = {
-          "sample": sample,
-          "columns": [
-              Field(col['name'], HiveFormat.FIELD_TYPE_TRANSLATE.get(col['type'], 'string')).to_dict()
-              for col in columns['extended_columns']
-          ],
-          "hs2_handle": None # HS2 there and valid? add sample
-      }
-    else:
-      sample = db.fetch_result(notebook, snippet, 4, start_over=True)
-
-      format_ = {
-          "sample": sample['rows'][:4],
-          #"sample_cols": sample.meta,
-          "columns": [
-              Field(col['name'], HiveFormat.FIELD_TYPE_TRANSLATE.get(col['type'], 'string')).to_dict()
-              for col in sample.meta
-          ]
-      }
+      columns = [
+          Field(col['name'], HiveFormat.FIELD_TYPE_TRANSLATE.get(col['type'], 'string')).to_dict()
+          for col in columns['extended_columns']
+      ]
+    format_ = {
+        "sample": sample,
+        "columns": columns,
+        "hs2_handle": None # HS2 there and valid? add sample
+    }
+#     else:
+#       format_ = {'status': 3}
+#       sample = db.fetch_result(notebook, snippet, 4, start_over=True)
+# 
+#       format_ = {
+#           "sample": sample['rows'][:4],
+#           #"sample_cols": sample.meta,
+#           "columns": [
+#               Field(col['name'], HiveFormat.FIELD_TYPE_TRANSLATE.get(col['type'], 'string')).to_dict()
+#               for col in sample.meta
+#           ]
+#       }
   elif file_format['inputFormat'] == 'rdbms':
     query_server = rdbms.get_query_server_config(server=file_format['rdbmsType'])
     db = rdbms.get(request.user, query_server=query_server)
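
On the API side, guess_field_types now short-circuits when the posted fileFormat already carries sample data: if sampleCols is present (as it is once the importer was prefilled from an export), those columns and rows are reused as-is; otherwise the query handle is fetched and autocompleted as before. A sketch of a payload that would take the new branch, roughly what ko.mapping.toJSON(self.source) in importer.mako produces (values and column keys are illustrative):

  // POSTed as "fileFormat" to indexer:guess_field_types.
  var fileFormat = {
    inputFormat: 'query',
    query: {id: 42},                              // hypothetical saved query id
    sampleCols: [{name: 'code', type: 'string'}],
    sample: [['AMS'], ['JFK']]                    // rows carried over from export_result
  };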
@@ -284,25 +289,25 @@ def _small_indexing(user, fs, client, source, destination, index_name):
 
   if source['inputFormat'] == 'file':
     data = fs.read(source["path"], 0, MAX_UPLOAD_SIZE)
-    
+
+  try:
+    if source['inputFormat'] == 'query':
+      #   elif file_format['inputFormat'] == 'hs2_handle':
+      searcher = CollectionManagerController(user)
+      columns = fields#['_uuid'] + [field['name'] for field in file_format['columns']]
+      return searcher.update_data_from_hive(index_name, columns, fetch_handle=file_format['fetch_handle'])
+      ## live HS2
+    else:      
+      if client.is_solr_six_or_more():
+        kwargs['processor'] = 'tolerant'
+      response = client.index(name=index_name, data=data, **kwargs)
+      errors = [error.get('message', '') for error in response['responseHeader'].get('errors', [])]
+  except Exception, e:
     try:
-      if source['inputFormat'] == 'query':
-        #   elif file_format['inputFormat'] == 'hs2_handle':
-        searcher = CollectionManagerController(user)
-        columns = fields#['_uuid'] + [field['name'] for field in file_format['columns']]
-        return searcher.update_data_from_hive(index_name, columns, fetch_handle=file_format['fetch_handle'])
-        ## live HS2
-      else:      
-        if client.is_solr_six_or_more():
-          kwargs['processor'] = 'tolerant'
-        response = client.index(name=index_name, data=data, **kwargs)
-        errors = [error.get('message', '') for error in response['responseHeader'].get('errors', [])]
-    except Exception, e:
-      try:
-        client.delete_index(index_name, keep_config=False)
-      except Exception, e2:
-        LOG.warn('Error while cleaning-up config of failed collection creation %s: %s' % (index_name, e2))
-      raise e
+      client.delete_index(index_name, keep_config=False)
+    except Exception, e2:
+      LOG.warn('Error while cleaning-up config of failed collection creation %s: %s' % (index_name, e2))
+    raise e
 
   return {'status': 0, 'on_success_url': reverse('indexer:indexes', kwargs={'index': index_name}), 'pub_sub_url': 'assist.collections.refresh', 'errors': errors}
 

+ 19 - 14
desktop/libs/indexer/src/indexer/templates/importer.mako

@@ -1228,7 +1228,7 @@ ${ assist.assistPanel() }
       self.sample = ko.observableArray();
       self.sampleCols = ko.observableArray();
 
-      self.inputFormat = ko.observable(wizard.prefill.source_type() == 'manual' ? 'manual' : 'file');
+      self.inputFormat = ko.observable(wizard.prefill.source_type() ? wizard.prefill.source_type() : 'file');
 
       self.inputFormat.subscribe(function(val) {
         wizard.destination.columns.removeAll();
@@ -1614,6 +1614,9 @@ ${ assist.assistPanel() }
           if (wizard.source.query()) {
             name = wizard.source.name();
           }
+          if (wizard.prefill.target_path().length > 0) {
+            name = wizard.prefill.target_path();
+          }
         } else if (wizard.source.inputFormat() == 'manual') {
           name = wizard.prefill.target_path().length > 0 ? wizard.prefill.target_path() + '.' : '';
         }
@@ -1790,7 +1793,7 @@ ${ assist.assistPanel() }
       self.prefill = ko.mapping.fromJS(${prefill | n});
 
       self.prefill.source_type.subscribe(function(newValue) {
-        self.source.inputFormat(newValue == 'manual' ? 'manual' : 'file');
+        self.source.inputFormat(newValue ? newValue : 'file');
       });
 
       self.show = ko.observable(true);
@@ -1904,17 +1907,7 @@ ${ assist.assistPanel() }
         guessFieldTypesXhr = $.post("${ url('indexer:guess_field_types') }", {
           "fileFormat": ko.mapping.toJSON(self.source)
         }, function (resp) {
-          resp.columns.forEach(function (entry, i, arr) {
-            if (self.destination.outputFormat() === 'table') {
-              entry.type = MAPPINGS.get(MAPPINGS.SOLR_TO_HIVE, entry.type, 'string');
-            } else if (self.destination.outputFormat() === 'index') {
-              entry.type = MAPPINGS.get(MAPPINGS.HIVE_TO_SOLR, entry.type, entry.type);
-            }
-            arr[i] = loadField(entry, self.destination, i);
-          });
-          self.source.sampleCols(resp.sample_cols ? resp.sample_cols : resp.columns);
-          self.source.sample(resp.sample);
-          self.destination.columns(resp.columns);
+          self.loadSampleData(resp);
           self.isGuessingFieldTypes(false);
         }).fail(function (xhr, textStatus, errorThrown) {
           $(document).trigger("error", xhr.responseText);
@@ -1922,7 +1915,19 @@ ${ assist.assistPanel() }
           viewModel.isLoading(false);
         });
       };
-
+      self.loadSampleData = function(resp) {
+        resp.columns.forEach(function (entry, i, arr) {
+          if (self.destination.outputFormat() === 'table') {
+            entry.type = MAPPINGS.get(MAPPINGS.SOLR_TO_HIVE, entry.type, 'string');
+          } else if (self.destination.outputFormat() === 'index') {
+            entry.type = MAPPINGS.get(MAPPINGS.HIVE_TO_SOLR, entry.type, entry.type);
+          }
+          arr[i] = loadField(entry, self.destination, i);
+        });
+        self.source.sampleCols(resp.sample_cols ? resp.sample_cols : resp.columns);
+        self.source.sample(resp.sample);
+        self.destination.columns(resp.columns);
+      };
       self.isIndexing = ko.observable(false);
       self.indexingError = ko.observable(false);
       self.indexingSuccess = ko.observable(false);
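
loadSampleData is factored out of the guess_field_types success handler so that the 'open.importer.query' subscriber in hue.mako can push a prefilled response straight into the wizard; together with the inputFormat changes above, a prefill source_type of 'query' is now kept instead of being collapsed to 'file'. A hedged usage sketch (resp is any object with columns, sample and optionally sample_cols, e.g. the export_result payload):

  // Map the column types to the chosen output format (Hive or Solr),
  // then populate the source sample and the destination columns.
  viewModel.createWizard.loadSampleData(resp);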

+ 42 - 20
desktop/libs/notebook/src/notebook/api.py

@@ -707,43 +707,65 @@ def export_result(request):
     # Open the result in the Dashboard via a SQL sub-query or the Import wizard (and its quick  or scalable indexer)
 
     if is_embedded:
-      if destination == '__hue__':
-        notebook_id = notebook['id'] or request.GET.get('editor', request.GET.get('notebook'))
+      notebook_id = notebook['id'] or request.GET.get('editor', request.GET.get('notebook'))
+
+      if destination == '__hue__':        
         engine = notebook['type'].replace('query-', '')
         response['watch_url'] = reverse('dashboard:browse', kwargs={'name': notebook_id}) + '?source=query&engine=%(engine)s' % {'engine': engine}
         response['status'] = 0
         return JsonResponse(response) # Currently do not live index into Solr, but uses a SQL sub-query
 
         destination = _get_snippet_name(notebook, unique=True, table_format=True)
-        live_indexing = True
+#         live_indexing = True
       else:
         live_indexing = False
 
-      sample = get_api(request, snippet).fetch_result(notebook, snippet, 0, start_over=True)
 
-      from indexer.api3 import _index # Will be moved to the lib
+      ## TODO
+      ## Add to response query id and sample data
+
+      sample = get_api(request, snippet).fetch_result(notebook, snippet, rows=4, start_over=True)
+
+#       from indexer.api3 import _index # Will be moved to the lib
       from indexer.file_format import HiveFormat
       from indexer.fields import Field
-
-      file_format = {
-          'name': 'col',
-          'inputFormat': 'query',
-          'format': {'quoteChar': '"', 'recordSeparator': '\n', 'type': 'csv', 'hasHeader': False, 'fieldSeparator': '\u0001'},
-          "sample": '',
+      format_ = {
+          "sample": list(sample['data']),
           "columns": [
-              Field(col['name'].rsplit('.')[-1], HiveFormat.FIELD_TYPE_TRANSLATE.get(col['type'], 'string')).to_dict()
+              Field(col['name'], HiveFormat.FIELD_TYPE_TRANSLATE.get(col['type'], 'string')).to_dict()
               for col in sample['meta']
           ]
       }
 
-      if live_indexing:
-        file_format['inputFormat'] = 'hs2_handle'
-        file_format['fetch_handle'] = lambda rows, start_over: get_api(request, snippet).fetch_result(notebook, snippet, rows=rows, start_over=start_over)
-        response['rowcount'] = _index(request, file_format, destination, query=notebook['uuid'], start_time=start_time)
-        response['watch_url'] = reverse('search:browse', kwargs={'name': destination})
-        response['status'] = 0
-      else:
-        response = _index(request, file_format, destination, query=notebook['uuid'], start_time=start_time)
+      return JsonResponse({
+        'status': 0,
+        'id': notebook_id,
+        'source_type': 'query',
+        'target_type': 'index',
+        'target_path': destination,
+        'sample': format_['sample'],
+        'columns': format_['columns']
+      })
+
+#       file_format = {
+#           'name': 'col',
+#           'inputFormat': 'query',
+#           'format': {'quoteChar': '"', 'recordSeparator': '\n', 'type': 'csv', 'hasHeader': False, 'fieldSeparator': '\u0001'},
+#           "sample": '',
+#           "columns": [
+#               Field(col['name'].rsplit('.')[-1], HiveFormat.FIELD_TYPE_TRANSLATE.get(col['type'], 'string')).to_dict()
+#               for col in sample['meta']
+#           ]
+#       }
+
+#       if live_indexing:
+#         file_format['inputFormat'] = 'hs2_handle'
+#         file_format['fetch_handle'] = lambda rows, start_over: get_api(request, snippet).fetch_result(notebook, snippet, rows=rows, start_over=start_over)
+#         response['rowcount'] = _index(request, file_format, destination, query=notebook['uuid'], start_time=start_time)
+#         response['watch_url'] = reverse('search:browse', kwargs={'name': destination})
+#         response['status'] = 0
+#       else:
+#         response = _index(request, file_format, destination, query=notebook['uuid'], start_time=start_time)
     else:
       notebook_id = notebook['id'] or request.GET.get('editor', request.GET.get('notebook'))
       response['watch_url'] = reverse('notebook:execute_and_watch') + '?action=index_query&notebook=' + str(notebook_id) + '&snippet=0&destination=' + destination
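
For the embedded search-index path, export_result now returns the prefill payload directly instead of launching the indexing itself; the front-end picks it up in the save-results modal and routes it through 'open.importer.query'. The response shape, as assembled above (values illustrative, column dicts trimmed to the relevant keys):

  {
    "status": 0,
    "id": 42,
    "source_type": "query",
    "target_type": "index",
    "target_path": "web_logs_index",
    "sample": [["AMS"], ["JFK"]],
    "columns": [{"name": "code", "type": "string"}]
  }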