
[importer] Prepare import from a Kafka Topic into a Table

This is part of a series of commits, with tests following up next
as well as more polishing of the current inputs of step 1.

The goal of this PR is to make the POCs easier to demo and understand.

Everything is flagged off by default and leverages the previous design
and its past implementation, to keep it as simple as possible and easy
to evolve.
Romain Rigaux 4 years ago
parent
commit
5a8ddcfc33

+ 8 - 2
desktop/conf.dist/hue.ini

@@ -2136,8 +2136,14 @@ submit_to=True
 # Enable the Kafka integration.
 ## is_enabled=false
 
-# Base URL of Kafka Ksql API.
-## ksql_api_url=http://127.0.0.1:8088
+# URL of Kafka REST API.
+## api_url=http://localhost:8082
+
+# URL of Kafka Ksql API.
+## ksql_api_url=http://localhost:8088
+
+# URL of Schema Registry API.
+## schema_registry_api_url=http://localhost:8081
 
 
 ###########################################################################
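A quick way to sanity-check the endpoints configured above before demoing the importer is to hit their standard status/listing routes. A minimal sketch using `requests`, assuming stock Confluent services on the default ports shown in the diff (adjust to whatever hue.ini actually points at):

```python
# Reachability check for the three services; the paths are the standard
# Confluent ones (REST Proxy /topics, ksqlDB /info, Schema Registry /subjects),
# not Hue endpoints.
import requests

ENDPOINTS = {
    'kafka_rest': 'http://localhost:8082/topics',
    'ksql': 'http://localhost:8088/info',
    'schema_registry': 'http://localhost:8081/subjects',
}

for name, url in ENDPOINTS.items():
    try:
        response = requests.get(url, timeout=5)
        print('%s -> HTTP %s' % (name, response.status_code))
    except requests.RequestException as error:
        print('%s -> unreachable (%s)' % (name, error))
```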

+ 11 - 5
desktop/conf/pseudo-distributed.ini.tmpl

@@ -2117,12 +2117,18 @@
 
 
 [kafka]
 
-  [[kafka]]
-    # Enable the Kafka integration.
-    ## is_enabled=false
+[[kafka]]
+# Enable the Kafka integration.
+## is_enabled=false
+
+# URL of Kafka REST API.
+## api_url=http://localhost:8082
+
+# URL of Kafka Ksql API.
+## ksql_api_url=http://localhost:8088
 
-    # Base URL of Kafka Ksql API.
-    ## ksql_api_url=http://127.0.0.1:8088
+# URL of Schema Registry API.
+## schema_registry_api_url=http://localhost:8081
 
 
 ###########################################################################

+ 5 - 33
desktop/libs/indexer/src/indexer/api3.py

@@ -241,7 +241,7 @@ def guess_field_types(request):
         column_row = csv_data[0]
       else:
         sample = csv_data[:4]
-        column_row = ['field_' + str(count+1) for count, col in enumerate(sample[0])] 
+        column_row = ['field_' + str(count+1) for count, col in enumerate(sample[0])]
 
 
       field_type_guesses = []
       for count, col in enumerate(column_row):
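When the uploaded CSV sample has no header row, the comprehension kept above synthesizes positional column names. A standalone illustration (the sample row is made up):

```python
# Mirrors the header-less branch of guess_field_types(): one 'field_N' name
# per column in the first sampled row, numbered from 1.
sample = [['894', '38.1952', '-123.1723']]  # hypothetical first data row

column_row = ['field_' + str(count + 1) for count, col in enumerate(sample[0])]
print(column_row)  # ['field_1', 'field_2', 'field_3']
```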
@@ -351,35 +351,6 @@ def guess_field_types(request):
               for col in kafkaFieldNames
           ]
       }
-
-#       data = """%(kafkaFieldNames)s
-# %(data)s""" % {
-#         'kafkaFieldNames': ','.join(kafkaFieldNames),
-#         'data': '\n'.join([','.join(cols) for cols in topics_data])
-#       }
-#       stream = string_io()
-#       stream.write(data)
-
-#       _convert_format(file_format["format"], inverse=True)
-
-#       indexer = MorphlineIndexer(request.user, request.fs)
-
-#       format_ = indexer.guess_field_types({
-#         "file": {
-#             "stream": stream,
-#             "name": file_format['path']
-#         },
-#         "format": file_format['format']
-#       })
-#       type_mapping = dict(
-#         list(
-#           zip(kafkaFieldNames, kafkaFieldTypes)
-#         )
-#       )
-
-#       for col in format_['columns']:
-#         col['keyType'] = type_mapping[col['name']]
-#         col['type'] = type_mapping[col['name']]
     elif file_format['streamSelection'] == 'flume':
       if 'hue-httpd/access_log' in file_format['channelSourcePath']:
         columns = [
@@ -483,7 +454,7 @@ def importer_submit(request):
           source,
           destination, index_name
       )
-  elif source['inputFormat'] in ('stream', 'connector') or destination['ouputFormat'] == 'stream':
+  elif destination['ouputFormat'] == 'stream-table':
     args = {
       'source': source,
       'destination': destination,
@@ -553,6 +524,7 @@ def importer_submit(request):
         start_time
       )
     else:
+      # TODO: if inputFormat is 'stream' and tableFormat is 'kudu' --> create Table only
       job_handle = _create_table(
         request,
         source,
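The TODO above is only a note; one possible reading, sketched here rather than taken from any follow-up implementation, is a guard that skips the data-load step when a Kafka stream lands in a Kudu table:

```python
# Hypothetical guard for the TODO: a stream source has no file to load, so the
# Kudu destination only needs its table created (DDL, no INSERT/copy step).
create_table_only = (
    source['inputFormat'] == 'stream' and destination['tableFormat'] == 'kudu'
)
```

How such a flag would feed the existing `_create_table()` branch is left to the follow-up commits mentioned in the description.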
@@ -741,9 +713,9 @@ def save_pipeline(request):
 
 
 def upload_local_file_drag_and_drop(request):
-
   response = {'status': -1, 'data': ''}
   form = UploadLocalFileForm(request.POST, request.FILES)
+
   if form.is_valid():
     resp = upload_local_file(request)
     json_data = json.loads(resp.content)
@@ -756,10 +728,10 @@ def upload_local_file_drag_and_drop(request):
 
 
 def upload_local_file(request):
-
   upload_file = request.FILES['file']
   username = request.user.username
   filename = "%s_%s:%s;" % (username, uuid.uuid4(), upload_file.name)
+
   temp_file = tempfile.NamedTemporaryFile(prefix=filename, suffix='.csv', delete=False)
   temp_file.write(upload_file.read())
   local_file_url = temp_file.name

+ 1 - 1
desktop/libs/indexer/src/indexer/indexers/sql.py

@@ -87,7 +87,7 @@ class SQLIndexer(object):
     is_transactional = destination['isTransactional']
     default_transactional_type = 'insert_only' if destination['isInsertOnly'] else 'default'
 
-    skip_header = destination['hasHeader']
+    skip_header = destination.get('hasHeader')
 
     primary_keys = destination['primaryKeys']
 

+ 72 - 34
desktop/libs/indexer/src/indexer/templates/importer.mako

@@ -341,13 +341,17 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
           <!-- /ko -->
 
           <!-- ko if: createWizard.source.inputFormat() == 'stream' -->
-            <div class="control-group">
-              <label class="control-label"><div>${ _('List') }</div>
-                <select data-bind="selectize: createWizard.source.publicStreams, value: createWizard.source.streamSelection, optionsText: 'name', optionsValue: 'value'" placeholder="${ _('The list of streams to consume, e.g. SFDC, Jiras...') }"></select>
-              </label>
-            </div>
+            ## <div class="control-group">
+            ##  <label class="control-label"><div>${ _('List') }</div>
+            ##    <select data-bind="selectize: createWizard.source.publicStreams, value: createWizard.source.streamSelection, optionsText: 'name', optionsValue: 'value'" placeholder="${ _('The list of streams to consume, e.g. SFDC, Jiras...') }"></select>
+            ##  </label>
+            ## s</div>
 
 
             <!-- ko if: createWizard.source.streamSelection() == 'kafka' -->
+              <div data-bind="template: { name: 'kafka-cluster-template', data: $data }" class="margin-top-10 field inline-block"></div>
+
+              <br>
+
               <div data-bind="template: { name: 'kafka-topic-template', data: $data }" class="margin-top-10 field inline-block"></div>
             <!-- /ko -->
 
@@ -1342,6 +1346,29 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
   <!-- /ko -->
 </script>
 
+<script type="text/html" id="kafka-cluster-template">
+  <div class="control-group">
+    <label class="control-label"><div>${ _('Clusters') }</div>
+      <select class="input-xxlarge" data-bind="options: createWizard.source.kafkaClusters,
+            value: createWizard.source.kafkaSelectedCluster,
+            optionsCaption: '${ _("Choose...") }'"
+            placeholder="${ _('The list of Kafka cluster to consume topics from') }">
+      </select>
+
+    </label>
+
+    <br/>
+
+    ## <label class="control-group" data-bind="visible: createWizard.source.kafkaSelectedCluster">
+    ##  <label class="control-label"><div>${ _('Username') }</div>
+    ##    <input type="text" class="input-small" data-bind="value: createWizard.source.kafkaSelectedClusterUsername">
+    ##  </label>
+    ##  <label class="control-label"><div>${ _('Password') }</div>
+    ##    <input type="text" class="input-small" data-bind="value: createWizard.source.kafkaSelectedClusterPassword">
+    ##  </label>
+  </div>
+</script>
+
 <script type="text/html" id="kafka-topic-template">
   <div class="control-group">
     <label class="control-label"><div>${ _('Topics') }</div>
@@ -1747,6 +1774,7 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
         if (val === 'stream') {
           if (self.streamSelection() === 'kafka') {
             wizard.guessFormat();
+            wizard.destination.outputFormat('table');
             wizard.destination.tableFormat('kudu');
           } else {
             wizard.destination.tableFormat('text');
@@ -1805,7 +1833,7 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
         });
         wizard.destination.dialect(dialect[0]['dialect']);
       });
-  
+
       // File
       self.path = ko.observable('');
       self.path.subscribe(function(val) {
@@ -2064,13 +2092,23 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
         }
       });
 
+      self.kafkaClusters = ko.observableArray(['localhost', 'demo.gethue.com']);
+      self.kafkaSelectedCluster = ko.observable();
+      self.kafkaSelectedCluster.subscribe(function(val) {
+        if (val) {
+          wizard.guessFormat();
+        }
+      });
+      self.kafkaSelectedCluster('localhost');
+      self.kafkaSelectedClusterUsername = ko.observable('gethue');
+      self.kafkaSelectedClusterPassword = ko.observable('pwd');
       self.kafkaTopics = ko.observableArray();
       self.kafkaSelectedTopics = ko.observable(''); // Currently designed just for one
       self.kafkaSelectedTopics.subscribe(function(newValue) {
         if (newValue) {
           viewModel.createWizard.guessFieldTypes();
-          self.kafkaFieldNames(hueUtils.hueLocalStorage('pai' + '_kafka_topics_' + newValue + '_kafkaFieldNames'));
-          self.kafkaFieldTypes(hueUtils.hueLocalStorage('pai' + '_kafka_topics_' + newValue + '_kafkaFieldTypes'));
+          //self.kafkaFieldNames(hueUtils.hueLocalStorage('pai' + '_kafka_topics_' + newValue + '_kafkaFieldNames'));
+          //self.kafkaFieldTypes(hueUtils.hueLocalStorage('pai' + '_kafka_topics_' + newValue + '_kafkaFieldTypes'));
         }
       });
       self.kafkaSchemaManual = ko.observable('detect');
@@ -2331,7 +2369,6 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
             return false;
           }
           if (format.value === 'table' &&
-              wizard.source.inputFormat() === 'stream' ||
               (wizard.source.inputFormat() === 'table' || (
                 wizard.source.inputFormat() === 'rdbms' && wizard.source.rdbmsAllTablesSelected()))) {
             return false;
@@ -2507,7 +2544,7 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
       self.KUDU_DEFAULT_PARTITION_COLUMN = {columns: [], range_partitions: [self.KUDU_DEFAULT_RANGE_PARTITION_COLUMN], name: 'HASH', int_val: 16};
 
       self.tableFormats = ko.pureComputed(function() {
-        if (wizard.source.inputFormat() === 'kafka') {
+        if (wizard.source.inputFormat() === 'stream') {
           return [{'value': 'kudu', 'name': 'Kudu'}];
         } else if (vm.sourceType == 'impala') { // Impala supports Kudu
           return [
@@ -2812,7 +2849,8 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
               self.source.streamObjects(resp['objects']);
             }
 
-            if (self.source.inputFormat() !== 'stream' && self.source.inputFormat() !== 'connector') {
+            if (self.source.inputFormat() !== 'stream' && self.source.inputFormat() !== 'connector' &&
+                (self.source.inputFormat() == 'localfile' && self.source.path() != '') ) {
               self.guessFieldTypes();
             }
           }
@@ -3188,29 +3226,29 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
 
 
       document.getElementById('inputfile').onchange = function () {
         upload();
-        };
-        function upload() {
-          var fd = new FormData();
-          var files = $('#inputfile')[0].files[0];
-          fd.append('file', files);
-          var file_size = files.size;
-          if (file_size > 200000) {
-                $.jHueNotify.warn("${ _('File size exceeds the supported size (200 KB).') }");
-              }
-          else {
-            $.ajax({
-              url:"/indexer/api/indexer/upload_local_file",
-              type: 'post',
-              data: fd,
-              contentType:false,
-              cache: false,
-              processData:false,
-              success:function (response) {
-                viewModel.createWizard.source.path(response['local_file_url']);
-              }
-            });
-          }
-        };
+      };
+
+      function upload() {
+        var fd = new FormData();
+        var files = $('#inputfile')[0].files[0];
+        fd.append('file', files);
+        var file_size = files.size;
+        if (file_size > 200000) {
+          $.jHueNotify.warn("${ _('File size exceeds the supported size (200 KB).') }");
+        } else {
+          $.ajax({
+            url:"/indexer/api/indexer/upload_local_file",
+            type: 'post',
+            data: fd,
+            contentType:false,
+            cache: false,
+            processData:false,
+            success:function (response) {
+              viewModel.createWizard.source.path(response['local_file_url']);
+            }
+          });
+        }
+      };
 
 
       $('.importer-droppable').droppable({
         accept: ".draggableText",

+ 9 - 4
desktop/libs/kafka/src/kafka/conf.py

@@ -46,15 +46,20 @@ KAFKA = ConfigSection(
       type=coerce_bool,
       default=False
     ),
-    # Deprecated
     API_URL=Config(
       key='api_url',
-      help=_t('Base URL of Kafka REST API.'),
+      help=_t('URL of Kafka REST API.'),
       default=None
     ),
     KSQL_API_URL=Config(
       key='ksql_api_url',
-      help=_t('Base URL of ksqlDB API.'),
-      default='http://127.0.0.1:8088'),
+      help=_t('URL of ksqlDB API.'),
+      default='http://localhost:8088'
+    ),
+    SCHEMA_REGISTRY_API_URL=Config(
+      key='schema_registry_api_url',
+      help=_t('URL of Schema Registry API.'),
+      default='http://localhost:8081'
+    ),
   )
 )
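The new entry is read like any other Hue `Config`; a minimal sketch of how a consumer picks it up (the same accessor the `SchemaRegistryApi` client below uses):

```python
from kafka.conf import KAFKA

# Returns the default above ('http://localhost:8081') unless
# schema_registry_api_url is overridden in hue.ini.
registry_url = KAFKA.SCHEMA_REGISTRY_API_URL.get()
print(registry_url)
```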

+ 24 - 6
desktop/libs/kafka/src/kafka/kafka_api.py

@@ -26,7 +26,7 @@ from metadata.manager_client import ManagerApi
 from notebook.models import _get_notebook_api
 
 from kafka.conf import has_kafka_api
-from kafka.kafka_client import KafkaApi, KafkaApiException
+from kafka.kafka_client import KafkaApi, KafkaApiException, SchemaRegistryApi
 
 if sys.version_info[0] > 2:
   from django.utils.translation import gettext as _
@@ -111,7 +111,7 @@ def get_topics(user):
       'database': 'topics'
     }
 
-    from desktop.api_public import _get_interpreter_from_dialect   # due to a circular import
+    from desktop.api_public import _get_interpreter_from_dialect  # Avoid circular import
     interpreter = _get_interpreter_from_dialect('ksql', user)
     api = _get_notebook_api(user, connector_id=interpreter['type'])
 
@@ -123,12 +123,30 @@ def get_topics(user):
 
 
 
 
 def get_topic_data(user, name):
-  from desktop.api_public import _get_interpreter_from_dialect   # due to a circular import
-  interpreter = _get_interpreter_from_dialect('ksql', user)
-  api = _get_notebook_api(user, connector_id=interpreter['type'])
+  if has_kafka_api():
+    print(
+      SchemaRegistryApi().subjects()
+    )
+    print(
+      SchemaRegistryApi().subject(name='Kafka-value')
+    )
+    data = {
+      'full_headers': [{'name': 'message', 'type': 'string'}],
+      'rows': [
+        ['This is rider 894 and I am at 38.1952, -123.1723'],
+        ['This is rider 98 and I am at 39.2531, -121.9547'],
+        ['This is rider 564 and I am at 22.3431, -111.7670']
+      ]
+    }
+  else:
+    from desktop.api_public import _get_interpreter_from_dialect  # Avoid circular import
+    interpreter = _get_interpreter_from_dialect('ksql', user)
+    api = _get_notebook_api(user, connector_id=interpreter['type'])
+
+    data = api.get_sample_data(snippet={})
 
 
-  data = api.get_sample_data(snippet={})
   print(data)
   print(data)
+
   return data
   return data
 
 
 
 

+ 28 - 0
desktop/libs/kafka/src/kafka/kafka_client.py

@@ -85,3 +85,31 @@ class KafkaApi(object):
       })
     except RestException as e:
       raise KafkaApiException(e)
+
+
+class SchemaRegistryApi(object):
+  """
+  https://github.com/confluentinc/schema-registry
+  """
+
+  def __init__(self, user=None, security_enabled=False, ssl_cert_ca_verify=False):
+    self._api_url = KAFKA.SCHEMA_REGISTRY_API_URL.get().strip('/') if KAFKA.SCHEMA_REGISTRY_API_URL.get() else ''
+
+    self.user = user
+    self._client = HttpClient(self._api_url, logger=LOG)
+    self._root = Resource(self._client)
+
+
+  def subjects(self):
+    try:
+      response = self._root.get('subjects')
+      return json.loads(response)
+    except RestException as e:
+      raise KafkaApiException(e)
+
+  def subject(self, name):
+    try:
+      response = self._root.get('subjects/%s/versions/latest' % name)
+      return json.loads(response)
+    except RestException as e:
+      raise KafkaApiException(e)
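A usage sketch for the new client; the response shapes follow the public Confluent Schema Registry REST API (GET /subjects returns a JSON list of subject names, GET /subjects/&lt;name&gt;/versions/latest the latest registered schema), so the example values are illustrative only:

```python
api = SchemaRegistryApi()

subjects = api.subjects()                 # e.g. ['Kafka-key', 'Kafka-value']
latest = api.subject(name='Kafka-value')  # e.g. {'subject': 'Kafka-value', 'version': 1, 'id': 1, 'schema': '...'}

for subject_name in subjects:
    version = api.subject(name=subject_name)['version']
    print('%s -> v%s' % (subject_name, version))
```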