Browse Source

HUE-5304 [indexer] Add support for native CSV indexing

Romain Rigaux 8 years ago
parent
commit
540ff49006

+ 35 - 5
desktop/libs/indexer/src/indexer/api3.py

@@ -34,7 +34,7 @@ from indexer.file_format import HiveFormat
 from indexer.fields import Field
 from indexer.indexers.morphline import MorphlineIndexer
 from indexer.indexers.sql import SQLIndexer
-from indexer.solr_client import SolrClient, SolrClientException
+from indexer.solr_client import SolrClient, SolrClientException, MAX_UPLOAD_SIZE
 
 
 LOG = logging.getLogger(__name__)
@@ -167,10 +167,40 @@ def importer_submit(request):
   start_time = json.loads(request.POST.get('start_time', '-1'))
 
   if destination['ouputFormat'] == 'index':
-    _convert_format(source["format"], inverse=True)
-    collection_name = destination["name"]
     source['columns'] = destination['columns']
-    job_handle = _index(request, source, collection_name, start_time=start_time)
+    index_name = destination["name"]
+
+    if destination['indexerRunJob']:
+      _convert_format(source["format"], inverse=True)
+      job_handle = _index(request, source, index_name, start_time=start_time)
+    else:
+      client = SolrClient(request.user)
+      unique_key_field = destination['indexerPrimaryKey'] and destination['indexerPrimaryKey'][0] or None
+      df = destination['indexerDefaultField'] and destination['indexerDefaultField'][0] or None
+      kwargs = {}
+
+      stats = request.fs.stats(source["path"])
+      if stats.size > MAX_UPLOAD_SIZE:
+        raise PopupException(_('File size is too large to handle!'))
+
+      indexer = MorphlineIndexer(request.user, request.fs)
+      fields = indexer.get_kept_field_list(source['columns'])
+      if not unique_key_field:
+        unique_key_field = 'hue_id'
+        fields += [{"name": unique_key_field, "type": "string"}]
+        kwargs['rowid'] = unique_key_field
+
+      client.create_index(
+          name=index_name,
+          fields=fields,
+          unique_key_field=unique_key_field,
+          df=df
+      )
+
+      data = request.fs.read(source["path"], 0, MAX_UPLOAD_SIZE)
+      client.index(name=index_name, data=data, **kwargs)
+
+      job_handle = {'status': 0, 'on_success_url': reverse('search:browse', kwargs={'name': index_name})}
   elif destination['ouputFormat'] == 'database':
     job_handle = _create_database(request, source, destination, start_time)
   else:
@@ -227,7 +257,7 @@ def _index(request, file_format, collection_name, query=None, start_time=None):
   if is_unique_generated:
     schema_fields += [{"name": unique_field, "type": "string"}]
 
-  client = SolrClient(user=request.user) 
+  client = SolrClient(user=request.user)
   try:
     client.get_index_schema(collection_name)
   except SolrClientException:

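The new branch above bypasses the MapReduce indexing job when indexerRunJob is off: it caps the input at MAX_UPLOAD_SIZE, adds a generated hue_id unique key when no primary key is picked, creates the index, and posts the raw CSV straight to Solr. A minimal standalone sketch of that flow against Solr's HTTP API (the Solr URL and the size cap below are assumptions, not values from this commit):

import requests

SOLR = 'http://localhost:8983/solr'      # assumption: local Solr instance
MAX_UPLOAD_SIZE = 100 * 1024 * 1024      # assumption: same order as solr_client.MAX_UPLOAD_SIZE

def index_csv(index_name, csv_path, has_primary_key=False):
    # Read at most MAX_UPLOAD_SIZE bytes, mirroring the fs.stats() size check.
    with open(csv_path, 'rb') as f:
        data = f.read(MAX_UPLOAD_SIZE + 1)
    if len(data) > MAX_UPLOAD_SIZE:
        raise ValueError('File size is too large to handle!')

    params = {'wt': 'json', 'commit': 'true', 'overwrite': 'true'}
    if not has_primary_key:
        # No primary key chosen: have Solr fill a per-line row id, the
        # counterpart of the generated hue_id field (the field must already
        # exist in the index schema, as create_index() ensures above).
        params['rowid'] = 'hue_id'

    resp = requests.post(
        '%s/%s/update' % (SOLR, index_name),
        params=params,
        data=data,
        headers={'Content-Type': 'application/csv'},
    )
    resp.raise_for_status()
    return resp.json()

Solr's CSV update handler fills the rowid field with each row's line number, which is what makes the generated hue_id usable as a unique key without touching the data itself.
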
+ 1 - 1
desktop/libs/indexer/src/indexer/indexers/morphline.py

@@ -177,7 +177,7 @@ class MorphlineIndexer(object):
       "fields": self.get_field_list(data['columns']),
       "num_base_fields": len(data['columns']),
       "uuid_name" : uuid_name,
-      "get_regex": Indexer._get_regex_for_type,
+      "get_regex": MorphlineIndexer._get_regex_for_type,
       "format_settings": data['format'],
       "format_class": get_file_format_class(data['format']['type']),
       "get_kept_args": get_checked_args,

+ 16 - 1
desktop/libs/indexer/src/indexer/solr_client.py

@@ -133,6 +133,21 @@ class SolrClient(object):
       self._create_non_solr_cloud_index(name, fields, unique_key_field, df)
 
 
+  def index(self, name, data, content_type='csv', version=None, **kwargs):
+    """
+    separator = ','
+    fieldnames = 'a,b,c' # header=true
+    skip 'a,b'
+    encapsulator="
+    escape=\
+    map
+    split
+    overwrite=true
+    rowid=id
+    """
+    return self.api.update(name, data, content_type=content_type, version=version, **kwargs)
+
+
   def _create_cloud_config(self, name, fields, unique_key_field, df):
     with ZookeeperClient(hosts=get_solr_ensemble(), read_only=False) as zc:
       tmp_path, solr_config_path = copy_configs(fields=fields, unique_key_field=unique_key_field, df=df, solr_cloud_mode=True)
@@ -198,7 +213,7 @@ class SolrClient(object):
         raise PopupException(_('Could not remove collection: %(message)s') % result)
 
 
-  def list_configs(self):    
+  def list_configs(self):
     return self.api.configs()
 
 

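Taken together with create_index(), the new index() call gives a two-step path for small files. A hypothetical usage sketch (the index name, field types and default field are placeholders; the extra keyword arguments are the CSV update handler options listed in the docstring above):

from indexer.solr_client import SolrClient

def quick_index(user, csv_data, index_name='logs_demo'):
    client = SolrClient(user)

    fields = [
        {'name': 'message', 'type': 'text_general'},
        {'name': 'hue_id', 'type': 'string'},   # generated unique key
    ]
    client.create_index(name=index_name, fields=fields,
                        unique_key_field='hue_id', df='message')

    # **kwargs go straight to Solr's CSV handler, e.g. separator=',',
    # fieldnames='a,b,c', skip='a,b', rowid='hue_id'.
    return client.index(name=index_name, data=csv_data, rowid='hue_id')
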
+ 13 - 5
desktop/libs/indexer/src/indexer/templates/importer.mako

@@ -668,13 +668,13 @@ ${ assist.assistPanel() }
 
             <div class="control-group">
               <label for="kuduPks" class="control-label"><div>${ _('Primary key') }</div>
-                <select id="kuduPks" data-bind="selectize: columns, selectedOptions: primaryKeys, selectedObjects: primaryKeyObjects, optionsValue: 'name', optionsText: 'name', innerSubscriber: 'name'" size="1" multiple="false"></select>
+                <select id="kuduPks" data-bind="selectize: columns, selectedOptions: indexerPrimaryKey, selectedObjects: indexerPrimaryKeyObject, optionsValue: 'name', optionsText: 'name', innerSubscriber: 'name'" size="1" multiple="false"></select>
               </label>
             </div>
 
             <div class="control-group">
               <label for="kuduPks" class="control-label"><div>${ _('Default field') }</div>
-                <select id="kuduPks" data-bind="selectize: columns, selectedOptions: primaryKeys, selectedObjects: primaryKeyObjects, optionsValue: 'name', optionsText: 'name', innerSubscriber: 'name'" size="1" multiple="false"></select>
+                <select id="kuduPks" data-bind="selectize: columns, selectedOptions: indexerDefaultField, selectedObjects: indexerDefaultFieldObject, optionsValue: 'name', optionsText: 'name', innerSubscriber: 'name'" size="1" multiple="false"></select>
               </label>
             </div>
 
@@ -1530,6 +1530,10 @@ ${ assist.assistPanel() }
       self.indexerConfigSets = ko.observableArray([]);
       self.indexerNumShards = ko.observable('');
       self.indexerReplicationFactor = ko.observable(1);
+      self.indexerPrimaryKey = ko.observableArray();
+      self.indexerPrimaryKeyObject = ko.observableArray();
+      self.indexerDefaultField = ko.observableArray();
+      self.indexerDefaultFieldObject = ko.observableArray();
     };
 
     var CreateWizard = function (vm) {
@@ -1783,9 +1787,13 @@ ${ assist.assistPanel() }
           "destination": ko.mapping.toJSON(self.destination),
           "start_time": ko.mapping.toJSON((new Date()).getTime())
         }, function (resp) {
-          if (resp.status == 0 && resp.history_uuid) {
-            $.jHueNotify.info("${ _('Task submitted.') }");
-            huePubSub.publish('notebook.task.submitted', resp.history_uuid);
+          if (resp.status == 0) {
+            if (resp.history_uuid) {
+              $.jHueNotify.info("${ _('Task submitted.') }");
+              huePubSub.publish('notebook.task.submitted', resp.history_uuid);
+            } else {
+              $.jHueNotify.info("${ _('Creation success.') }");
+            }
           } else {
             $(document).trigger("error", resp && resp.message ? resp.message : '${ _("Error importing") }');
           }

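The two new observables are serialized with the rest of the destination model, so the request body importer_submit receives for this path looks roughly like the sketch below (values are illustrative; 'ouputFormat' is spelled that way in the source):

import json

destination = {
    'ouputFormat': 'index',
    'name': 'yelp_reviews',
    'columns': [{'name': 'text', 'type': 'string'}],
    'indexerRunJob': False,              # False -> direct CSV upload, no job
    'indexerPrimaryKey': ['id'],         # from the new Primary key select
    'indexerDefaultField': ['text'],     # from the new Default field select
}
source = {
    'path': '/user/demo/reviews.csv',
    'format': {'type': 'csv'},
    'columns': destination['columns'],
}
payload = {
    'source': json.dumps(source),
    'destination': json.dumps(destination),
    'start_time': json.dumps(-1),
}
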
+ 29 - 26
desktop/libs/libsolr/src/libsolr/api.py

@@ -372,7 +372,7 @@ class SolrApi(object):
         ('action', 'CREATE'),
         ('name', name),
         ('numShards', shards),
-        ('replicationFactor', replication),        
+        ('replicationFactor', replication),
         ('wt', 'json')
       )
       if config_name:
@@ -380,8 +380,7 @@ class SolrApi(object):
           ('collection.configName', config_name),
         )
       if kwargs:
-        params += ((key, val) for key, val in kwargs.iteritems())
-        
+        params += tuple(((key, val) for key, val in kwargs.iteritems()))
 
       response = self._root.post('admin/collections', params=params, contenttype='application/json')
       return self._get_json(response)
@@ -391,7 +390,7 @@ class SolrApi(object):
 
   def add_fields(self, name, fields):
     try:
-      params = self._get_params() + (        
+      params = self._get_params() + (
         ('wt', 'json'),
       )
 
@@ -479,7 +478,7 @@ class SolrApi(object):
     except RestException, e:
       raise PopupException(e, title=_('Error while accessing Solr'))
     return response
-    
+
 
   def remove_core(self, name):
     try:
@@ -666,6 +665,31 @@ class SolrApi(object):
       raise PopupException(e, title=_('Error while accessing Solr'))
 
 
+  def update(self, collection_or_core_name, data, content_type='csv', version=None, **kwargs):
+    if content_type == 'csv':
+      content_type = 'application/csv'
+    elif content_type == 'json':
+      content_type = 'application/json'
+    else:
+      LOG.error("Trying to update collection %s with content type %s. Allowed content types: csv/json" % (collection_or_core_name, content_type))
+
+    params = self._get_params() + (
+        ('wt', 'json'),
+        ('overwrite', 'true'),
+        ('commit', 'true'),
+    )
+    if version is not None:
+      params += (
+        ('_version_', version),
+        ('versions', 'true')
+      )
+    if kwargs:
+      params += tuple(((key, val) for key, val in kwargs.iteritems()))
+
+    response = self._root.post('%s/update' % collection_or_core_name, contenttype=content_type, params=params, data=data)
+    return self._get_json(response)
+
+
   # Deprecated
   def create_collection(self, name, shards=1, replication=1):
     try:
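
A small fix worth noting in the hunks above: params is a tuple, and concatenating a tuple with a bare generator raises a TypeError, so the kwargs expansion is now wrapped in tuple() here and in the collection CREATE call. A two-line illustration (plain Python 3 dict methods here, versus iteritems() in the diff):

params = (('wt', 'json'), ('overwrite', 'true'), ('commit', 'true'))
kwargs = {'rowid': 'hue_id', 'separator': ','}

# params += ((k, v) for k, v in kwargs.items())     # TypeError: can only concatenate tuple
params += tuple((k, v) for k, v in kwargs.items())  # extra CSV handler options appended
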
@@ -882,27 +906,6 @@ class SolrApi(object):
     except RestException, e:
       raise PopupException(e, title=_('Error while accessing Solr'))
 
-  def update(self, collection_or_core_name, data, content_type='csv', version=None):
-    if content_type == 'csv':
-      content_type = 'application/csv'
-    elif content_type == 'json':
-      content_type = 'application/json'
-    else:
-      LOG.error("Trying to update collection  %s with content type %s. Allowed content types: csv/json" % (collection_or_core_name, content_type))
-
-    params = self._get_params() + (
-        ('wt', 'json'),
-        ('overwrite', 'true'),
-        ('commit', 'true'),
-    )
-    if version is not None:
-      params += (
-        ('_version_', version),
-        ('versions', 'true')
-      )
-    response = self._root.post('%s/update' % collection_or_core_name, contenttype=content_type, params=params, data=data)
-    return self._get_json(response)
-
 
 GAPS = {
     '5MINUTES': {