HUE-8544 [importer] Support sending file data into a kafka topic

Romain Rigaux, 7 years ago
commit 0689ffb
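Summary: the importer can now target a Kafka topic as the output of an ingest job. importer_submit() routes destination['ouputFormat'] == 'stream' to _envelope_job(), the Envelope config generator gains a kafka output block plus a shared "SELECT * from inputdata" deriver step, the hard-coded /tmp/envelope.jar lib path gives way to a new [indexer] config_jars_libs_path setting, and the importer UI factors the Kafka topic controls into a reusable kafka-topic-template and offers a 'Stream' destination for file inputs.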

+ 1 - 2
apps/filebrowser/src/filebrowser/views.py

@@ -652,8 +652,7 @@ def display(request, path):
    if mode == 'binary':
        compression = 'none'
        # Read out based on meta.
-    compression, offset, length, contents =\
-    read_contents(compression, path, request.fs, offset, length)
+    compression, offset, length, contents = read_contents(compression, path, request.fs, offset, length)

    # Get contents as string for text mode, or at least try
    uni_contents = None

+ 6 - 3
desktop/conf.dist/hue.ini

@@ -1377,15 +1377,18 @@

 [indexer]

-  # Flag to turn on the Solr Morphline indexer.
-  ## enable_scalable_indexer=true
-
   # Filesystem directory containing Solr Morphline indexing libs.
   ## config_indexer_libs_path=/tmp/smart_indexer_lib

   # Filesystem directory containing JDBC libs.
   ## config_jdbc_libs_path=/user/oozie/libext/jdbc_drivers

+  # Filesystem directory containing jar libs.
+  ## config_jars_libs_path=/user/oozie/libext/libs
+
+  # Flag to turn on the Solr Morphline indexer.
+  ## enable_scalable_indexer=true
+
   # Flag to turn on Sqoop ingest.
   ## enable_sqoop=true


+ 6 - 3
desktop/conf/pseudo-distributed.ini.tmpl

@@ -1379,15 +1379,18 @@

 [indexer]

-  # Flag to turn on the Solr Morphline indexer.
-  ## enable_scalable_indexer=true
-
   # Filesystem directory containing Solr Morphline indexing libs.
   ## config_indexer_libs_path=/tmp/smart_indexer_lib

   # Filesystem directory containing JDBC libs.
   ## config_jdbc_libs_path=/user/oozie/libext/jdbc_drivers

+  # Filesystem directory containing jar libs.
+  ## config_jars_libs_path=/user/oozie/libext/libs
+
+  # Flag to turn on the Solr Morphline indexer.
+  ## enable_scalable_indexer=true
+
   # Flag to turn on Sqoop ingest.
   ## enable_sqoop=true


+ 15 - 6
desktop/libs/indexer/src/indexer/api3.py

@@ -321,7 +321,7 @@ def importer_submit(request):
  if destination['ouputFormat'] in ('database', 'table'):
    destination['nonDefaultLocation'] = request.fs.netnormpath(destination['nonDefaultLocation']) if destination['nonDefaultLocation'] else destination['nonDefaultLocation']

-  if source['inputFormat'] == 'stream':
+  if source['inputFormat'] == 'stream' or destination['ouputFormat'] == 'stream':
    job_handle = _envelope_job(request, source, destination, start_time=start_time, lib_path=destination['indexerJobLibPath'])
  elif destination['ouputFormat'] == 'index':
    source['columns'] = destination['columns']
@@ -485,7 +485,7 @@ def _envelope_job(request, file_format, destination, start_time=None, lib_path=N
  collection_name = destination['name']
  indexer = EnvelopeIndexer(request.user, request.fs)

-  lib_path = '/tmp/envelope.jar'
+  lib_path = None # Todo optional input field
  input_path = None

  if file_format['inputFormat'] == 'table':
@@ -493,9 +493,10 @@ def _envelope_job(request, file_format, destination, start_time=None, lib_path=N
    table_metadata = db.get_table(database=file_format['databaseName'], table_name=file_format['tableName'])
    input_path = table_metadata.path_location
  elif file_format['inputFormat'] == 'file':
-    input_path = '${nameNode}%s' % file_format["path"]
+    input_path = file_format["path"]
    properties = {
-      'format': 'json'
+      'input_path': input_path,
+      'format': 'csv'
    }
  elif file_format['inputFormat'] == 'stream':
    if file_format['streamSelection'] == 'sfdc':
@@ -511,7 +512,6 @@ def _envelope_job(request, file_format, destination, start_time=None, lib_path=N
      manager = ManagerApi()
      properties = {
        "brokers": manager.get_kafka_brokers(),
-        "output_table": "impala::%s" % collection_name,
        "topics": file_format['kafkaSelectedTopics'],
        "kafkaFieldType": file_format['kafkaFieldType'],
        "kafkaFieldDelimiter": file_format['kafkaFieldDelimiter'],
@@ -534,7 +534,10 @@ def _envelope_job(request, file_format, destination, start_time=None, lib_path=N
        properties['output_table'] = collection_name
    elif destination['outputFormat'] == 'file':
      properties['path'] = file_format["path"]
-      properties['format'] = file_format['tableFormat'] # or csv
+      if file_format['inputFormat'] == 'stream':
+        properties['format'] = 'csv'
+      else:
+        properties['format'] = file_format['tableFormat'] # or csv
    elif destination['outputFormat'] == 'index':
      properties['collectionName'] = collection_name
      properties['connection'] = SOLR_URL.get()
@@ -546,6 +549,12 @@ def _envelope_job(request, file_format, destination, start_time=None, lib_path=N
        kwargs = {}
        _create_solr_collection(request.user, request.fs, client, destination, collection_name, kwargs)

+  if destination['outputFormat'] == 'stream':
+    manager = ManagerApi()
+    properties['brokers'] = manager.get_kafka_brokers()
+    properties['topics'] = file_format['kafkaSelectedTopics']
+    properties['kafkaFieldDelimiter'] = file_format['kafkaFieldDelimiter']
+
  properties["app_name"] = 'Data Ingest'
  properties["inputFormat"] = file_format['inputFormat']
  properties["ouputFormat"] = destination['ouputFormat']

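For orientation, a sketch (not part of the commit) of the properties dict that _envelope_job() now assembles for a file-to-Kafka ingest; the concrete values are hypothetical placeholders:

    # Hypothetical example of the properties built for a file -> Kafka job.
    properties = {
      'input_path': '/data/demo/sales.csv',  # from file_format['path']
      'format': 'csv',                       # file input is now read as csv
      'brokers': 'broker-1:9092',            # from ManagerApi().get_kafka_brokers()
      'topics': 'salesTopic',                # from file_format['kafkaSelectedTopics']
      'kafkaFieldDelimiter': ',',            # from file_format['kafkaFieldDelimiter']
      'app_name': 'Data Ingest',
      'inputFormat': 'file',
      'ouputFormat': 'stream',               # key spelling matches the codebase
    }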
+ 7 - 0
desktop/libs/indexer/src/indexer/conf.py

@@ -96,6 +96,13 @@ CONFIG_JDBC_LIBS_PATH = Config(
  default='/user/oozie/libext/jdbc_drivers'
)

+CONFIG_JARS_LIBS_PATH = Config(
+  key="config_jars_libs_path",
+  help=_t("Filesystem directory containing jars libs."),
+  type=str,
+  default='/user/oozie/libext/libs'
+)
+
ENABLE_SQOOP = Config(
  key="enable_sqoop",
  help=_t("Flag to turn on Sqoop imports."),

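A minimal sketch of how the new entry is consumed (it mirrors the envelope.py change below): CONFIG_JARS_LIBS_PATH.get() returns the [indexer] config_jars_libs_path value from hue.ini, or the declared default while that line stays commented out.

    from indexer.conf import CONFIG_JARS_LIBS_PATH

    # '/user/oozie/libext/libs' unless overridden in hue.ini.
    jar_libs_dir = CONFIG_JARS_LIBS_PATH.get()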
+ 39 - 8
desktop/libs/indexer/src/indexer/indexers/envelope.py

@@ -26,6 +26,8 @@ from desktop.conf import DISABLE_HUE_3
 from hadoop.fs.hadoopfs import Hdfs
 from notebook.models import make_notebook

+from indexer.conf import CONFIG_JARS_LIBS_PATH
+

 LOG = logging.getLogger(__name__)

@@ -54,6 +56,8 @@ class EnvelopeIndexer(object):

   def run(self, request, collection_name, envelope, input_path, start_time=None, lib_path=None):
     workspace_path = self._upload_workspace(envelope)
+    if lib_path is None:
+      lib_path = CONFIG_JARS_LIBS_PATH.get()

     task = make_notebook(
       name=_('Indexing into %s') % collection_name,
@@ -70,6 +74,9 @@ class EnvelopeIndexer(object):
      shell_command = """#!/bin/bash

export SPARK_DIST_CLASSPATH=`hadoop classpath`
+export SPARK_DIST_CLASSPATH=/etc/hive/conf:`hadoop classpath`
+export JAVA_HOME=/usr/java/jdk1.8.0_162
+
SPARK_KAFKA_VERSION=0.10 spark2-submit envelope.jar envelope.conf"""
      hdfs_shell_cmd_path = os.path.join(workspace_path, shell_command_name)
      self.fs.do_as_user(self.username, self.fs.create, hdfs_shell_cmd_path, data=shell_command)
@@ -132,15 +139,15 @@ SPARK_KAFKA_VERSION=0.10 spark2-submit envelope.jar envelope.conf"""
        raise PopupException(_('Stream format of %(inputFormat)s not recognized: %(streamSelection)s') % properties)
    elif properties['inputFormat'] == 'file':
      input = """type = filesystem
-      path = %(path)s
-      format = %(format)s
+        path = %(input_path)s
+        format = %(format)s
      """ % properties
    else:
      raise PopupException(_('Input format not recognized: %(inputFormat)s') % properties)


    if properties['ouputFormat'] == 'file':
-      output = """dependencies = [inputdata]
+      output = """
        planner = {
          type = overwrite
        }
@@ -152,7 +159,7 @@ SPARK_KAFKA_VERSION=0.10 spark2-submit envelope.jar envelope.conf"""
        }""" % properties
    elif properties['ouputFormat'] == 'table':
      if properties['inputFormat'] == 'stream' and properties['streamSelection'] == 'kafka':
-        output = """dependencies = [inputdata]
+        output = """
          deriver {
              type = sql
              query.literal = \"""
@@ -167,7 +174,7 @@ SPARK_KAFKA_VERSION=0.10 spark2-submit envelope.jar envelope.conf"""
              table.name = "%(output_table)s"
          }""" % properties
      else:
-        output = """dependencies = [inputdata]
+        output = """
          planner {
              type = append
          }
@@ -176,7 +183,7 @@ SPARK_KAFKA_VERSION=0.10 spark2-submit envelope.jar envelope.conf"""
              table.name = "%(output_table)s"
          }""" % properties
    elif properties['ouputFormat'] == 'index':
-      output = """dependencies = [inputdata]
+      output = """
        planner {
            type = upstert
        }
@@ -185,13 +192,25 @@ SPARK_KAFKA_VERSION=0.10 spark2-submit envelope.jar envelope.conf"""
            connection = "%(connection)s"
            collection.name = "%(collectionName)s"
        }""" % properties
+    elif properties['ouputFormat'] == 'stream':
+      output = """
+        planner {
+            type = append
+        }
+        output {
+            type = kafka
+            brokers = "%(brokers)s"
+            topic = %(topics)s
+            serializer.type = delimited
+            serializer.field.delimiter = ","
+        }""" % properties
    else:
      raise PopupException(_('Output format not recognized: %(ouputFormat)s') % properties)

    return """
application {
    name = %(app_name)s
-    batch.milliseconds = 5000
+    %(batch)s
    executors = 1
    executor.cores = 1
    executor.memory = 1G
@@ -205,8 +224,20 @@ steps {
    }

    outputdata {
+        dependencies = [inputdata]
+
+        deriver {
+          type = sql
+          query.literal = \"\"\"SELECT * from inputdata\"\"\"
+        }
+
        %(output)s
    }
}

-""" % {'input': input, 'output': output, 'app_name': properties['app_name']}
+""" % {
+    'input': input,
+    'output': output,
+    'app_name': properties['app_name'],
+    'batch': 'batch.milliseconds = 5000' if properties['inputFormat'] == 'stream' else ''
+  }

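The application block is now rendered with a conditional batch line; here is a small standalone sketch of that substitution (the template is trimmed for illustration): stream inputs keep the 5-second micro-batch setting, file inputs drop it.

    # Illustrative: mirrors the 'batch' substitution in generate_config().
    template = """application {
        name = %(app_name)s
        %(batch)s
    }"""

    for input_format in ('stream', 'file'):
      print(template % {
        'app_name': 'Data Ingest',
        'batch': 'batch.milliseconds = 5000' if input_format == 'stream' else ''
      })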
+ 72 - 3
desktop/libs/indexer/src/indexer/indexers/envelope_tests.py

@@ -36,7 +36,7 @@ def test_generate_from_kafka_to_file_csv():
    'kafkaFieldTypes': 'int,string',

    'ouputFormat': 'file',
-    'path': '/tmp/output',
+    'input_path': '/tmp/output',
    'format': 'csv'
  }

@@ -59,12 +59,18 @@ def test_generate_from_kafka_to_file_csv():
                    enabled = true
                    milliseconds = 60000
                }
-        
+
        }
    }

    outputdata {
        dependencies = [inputdata]
+
+        deriver {
+          type = sql
+          query.literal = """SELECT * from inputdata"""
+        }
+
        planner = {
          type = overwrite
        }
@@ -118,6 +124,12 @@ def test_generate_from_stream_sfdc_to_hive_table():

    outputdata {
        dependencies = [inputdata]
+
+        deriver {
+          type = sql
+          query.literal = """SELECT * from inputdata"""
+        }
+
          planner {
              type = append
          }
@@ -166,12 +178,18 @@ def test_generate_from_stream_kafka_to_solr_index():
                    enabled = true
                    milliseconds = 60000
                }
-        
+
        }
    }

    outputdata {
        dependencies = [inputdata]
+
+        deriver {
+          type = sql
+          query.literal = """SELECT * from inputdata"""
+        }
+
        planner {
            type = upstert
        }
@@ -182,3 +200,54 @@ def test_generate_from_stream_kafka_to_solr_index():
        }
    }
}''' in  config, config)
+
+
+def test_generate_from_file_to_kafka():
+  properties = {
+    'app_name': 'Ingest',
+
+    'inputFormat': 'file',
+    'input_path': '/tmp/output',
+    'format': 'csv',
+
+    'ouputFormat': 'stream',
+    'streamSelection': 'kafka',
+    'brokers': 'broker:9092',
+    'topics': 'kafkaTopic',
+    'kafkaFieldType': 'delimited',
+  }
+
+  config = EnvelopeIndexer(username='test').generate_config(properties)
+
+  assert_true('''steps {
+    inputdata {
+        input {
+            type = filesystem
+        path = /tmp/output
+        format = csv
+      
+        }
+    }
+
+    outputdata {
+        dependencies = [inputdata]
+
+        deriver {
+          type = sql
+          query.literal = """SELECT * from inputdata"""
+        }
+
+        
+        planner {
+            type = append
+        }
+        output {
+            type = kafka
+            brokers = "broker:9092"
+            topic = kafkaTopic
+            serializer.type = delimited
+            serializer.field.delimiter = ","
+        }
+    }
+}
+''' in  config, config)

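A quick way to exercise the new path by hand (a sketch mirroring test_generate_from_file_to_kafka above; generate_config() only renders text, so no cluster is contacted, though a Hue dev environment is assumed for the import):

    from indexer.indexers.envelope import EnvelopeIndexer

    config = EnvelopeIndexer(username='test').generate_config({
      'app_name': 'Ingest',
      'inputFormat': 'file', 'input_path': '/tmp/output', 'format': 'csv',
      'ouputFormat': 'stream', 'streamSelection': 'kafka',
      'brokers': 'broker:9092', 'topics': 'kafkaTopic', 'kafkaFieldType': 'delimited',
    })
    assert 'type = kafka' in config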
+ 75 - 54
desktop/libs/indexer/src/indexer/templates/importer.mako

@@ -344,55 +344,7 @@ ${ assist.assistPanel() }
            </div>

            <!-- ko if: createWizard.source.streamSelection() == 'kafka' -->
-              <div class="control-group">
-                <label class="control-label"><div>${ _('Topics') }</div>
-                  <select class="input-xxlarge" data-bind="options: createWizard.source.kafkaTopics,
-                         value: createWizard.source.kafkaSelectedTopics,
-                         optionsCaption: '${ _("Choose...") }'"
-                         placeholder="${ _('The list of topics to consume, e.g. orders,returns') }"></select>
-                  ## <select data-bind="selectize: createWizard.source.kafkaTopics, value: createWizard.source.kafkaSelectedTopics" placeholder="${ _('The list of topics to consume, e.g. orders,returns') }"></select>
-                </label>
-
-                <br/>
-
-                <div class="control-group" data-bind="visible: createWizard.source.kafkaSelectedTopics">
-                  <label class="control-label"><div>${ _('Schema') }</div>
-                    <label class="checkbox inline-block">
-                      <input type="radio" name="kafkaSchemaManual" value="manual" data-bind="checked: createWizard.source.kafkaSchemaManual" /> ${_('Manual')}
-                    </label>
-                    <label class="checkbox inline-block">
-                      <input type="radio" name="kafkaSchemaManual" value="detect" data-bind="checked: createWizard.source.kafkaSchemaManual" /> ${_('Guess')}
-                    </label>
-                  </label>
-
-                  <label class="control-label" data-bind="visible: createWizard.source.kafkaSchemaManual() == 'manual'">
-                  ##<label class="control-label"><div>${ _('Encoding') }</div>
-                  ##  <input type="text" class="input-xxlarge" data-bind="value: createWizard.source.kafkaFieldType">
-                  ##</label>
-                  <label class="control-label"><div>${ _('Type') }</div>
-                    <select class="input-medium" data-bind="options: ['delimited', 'bitarray'], value: createWizard.source.kafkaFieldType"></select>
-                  </label>
-                  <label class="control-label"><div>${ _('Delimiter') }</div>
-                    <input type="text" class="input-small" data-bind="value: createWizard.source.kafkaFieldDelimiter">
-                  </label>
-
-                  <br/>
-
-                  <label class="control-label"><div>${ _('Field names') }</div>
-                    <input type="text" class="input-xxlarge" data-bind="value: createWizard.source.kafkaFieldNames" placeholder="${ _('The list of fields to consume, e.g. orders,returns') }">
-                  </label>
-                  <label class="control-label"><div>${ _('Field types') }</div>
-                    <input type="text" class="input-xxlarge" data-bind="value: createWizard.source.kafkaFieldTypes" placeholder="${ _('The list of field typs, e.g. string,int') }">
-                  </label>
-                </label>
-
-                <div class="control-group" data-bind="visible: createWizard.source.hasStreamSelected">
-                  <button class="btn" data-bind="click: createWizard.source.streamCheckConnection">
-                    ${_('Test')}
-                  </button>
-                </div>
-                </div>
-              </div>
+              <div data-bind="template: { name: 'kafka-topic-template', data: $data }" class="margin-top-10 field inline-block"></div>
            <!-- /ko -->

            <!-- ko if: createWizard.source.streamSelection() == 'sfdc' -->
@@ -417,9 +369,9 @@ ${ assist.assistPanel() }
                <!-- ko if: createWizard.source.streamUsername() && createWizard.source.streamPassword() && createWizard.source.streamToken() -->
                <label class="control-label"><div>${ _('Object') }</div>
                  <select class="input-xxlarge" data-bind="options: createWizard.source.streamObjects,
-                         value: createWizard.source.streamObject,
-                         optionsCaption: '${ _("Choose...") }'"
-                         placeholder="${ _('The SFDC object to import, e.g. Account, Opportunity') }"></select>
+                        value: createWizard.source.streamObject,
+                        optionsCaption: '${ _("Choose...") }'"
+                        placeholder="${ _('The SFDC object to import, e.g. Account, Opportunity') }"></select>
                </label>
                <!-- /ko -->
              </div>
@@ -545,17 +497,22 @@ ${ assist.assistPanel() }
              <select id="destinationType" data-bind="selectize: outputFormats, value: outputFormat, optionsValue: 'value', optionsText: 'name'"></select>
            </label>
          </div>
+
          <div class="control-group">
            <label for="collectionName" class="control-label "><div>${ _('Name') }</div></label>
+
            <!-- ko if: outputFormat() == 'file' -->
              <input type="text" class="form-control name input-xxlarge" id="collectionName" data-bind="value: name, filechooser: name, filechooserOptions: { linkMarkup: true, skipInitialPathIfEmpty: true, openOnFocus: true, selectFolder: true, displayOnlyFolders: true, uploadFile: false}" placeholder="${ _('Name') }" title="${ _('Directory must not exist in the path') }">
            <!-- /ko -->
+
            <!-- ko if: outputFormat() == 'index' -->
              <input type="text" class="form-control input-xlarge" id="collectionName" data-bind="value: name, valueUpdate: 'afterkeydown'" placeholder="${ _('Name') }">
            <!-- /ko -->
+
            <!-- ko if: ['table', 'database'].indexOf(outputFormat()) != -1 -->
              <input type="text" class="input-xlarge" data-bind="value: name, hivechooser: name, namespace: namespace, compute: compute, skipColumns: true, skipTables: outputFormat() == 'database', valueUpdate: 'afterkeydown', apiHelperUser: '${ user }', apiHelperType: sourceType, mainScrollable: $(MAIN_SCROLLABLE), attr: { 'placeholder': outputFormat() == 'table' ? '${  _ko('Table name or <database>.<table>') }' : '${  _ko('Database name') }' }" pattern="^([a-zA-Z0-9_]+\.)?[a-zA-Z0-9_]*$" title="${ _('Only alphanumeric and underscore characters') }">
            <!-- /ko -->
+
            <!-- ko if: outputFormat() == 'altus' -->
              <!-- ko if: namespaces().length > 1 -->
                <select data-bind="selectize: namespaces, value: namespace, optionsValue: 'id', optionsText: 'name'" class="input-medium"></select>
@@ -572,6 +529,14 @@ ${ assist.assistPanel() }
                <i class="fa fa-info"></i>
              </a>
            <!-- /ko -->
+
+            <!-- ko if: outputFormat() == 'stream' -->
+              <!-- ko with: $root -->
+                <input type="text" data-bind="value: createWizard.source.kafkaSelectedTopics">
+                ## <div data-bind="template: { name: 'kafka-topic-template' }" class="margin-top-10 field inline-block"></div>
+              <!-- /ko -->
+            <!-- /ko -->
+
            <span class="help-inline muted" data-bind="visible: !isTargetExisting() && isTargetChecking()">
              <i class="fa fa-spinner fa-spin"></i>
            </span>
@@ -1033,7 +998,6 @@ ${ assist.assistPanel() }
    </div>
  </div>

-
</script>


@@ -1071,7 +1035,7 @@ ${ assist.assistPanel() }
      <span data-bind="visible: showProperties">
        <input type="text" class="input-medium margin-left-5" placeholder="${ _('Field comment') }" data-bind="value: comment">
        <label class="checkbox" data-bind="visible: $root.createWizard.destination.tableFormat() == 'kudu'">
-         <input type="checkbox" data-bind="checked: keep"> ${_('Keep')}
+          <input type="checkbox" data-bind="checked: keep"> ${_('Keep')}
        </label>
      </span>
    </span>
@@ -1245,6 +1209,57 @@ ${ assist.assistPanel() }
  <!-- /ko -->
</script>

+<script type="text/html" id="kafka-topic-template">
+  <div class="control-group">
+    <label class="control-label"><div>${ _('Topics') }</div>
+      <select class="input-xxlarge" data-bind="options: createWizard.source.kafkaTopics,
+            value: createWizard.source.kafkaSelectedTopics,
+            optionsCaption: '${ _("Choose...") }'"
+            placeholder="${ _('The list of topics to consume, e.g. orders,returns') }">
+      </select>
+      ## <select data-bind="selectize: createWizard.source.kafkaTopics, value: createWizard.source.kafkaSelectedTopics" placeholder="${ _('The list of topics to consume, e.g. orders,returns') }"></select>
+    </label>
+
+    <br/>
+
+    <div class="control-group" data-bind="visible: createWizard.source.kafkaSelectedTopics">
+      <label class="control-label"><div>${ _('Schema') }</div>
+        <label class="checkbox inline-block">
+          <input type="radio" name="kafkaSchemaManual" value="manual" data-bind="checked: createWizard.source.kafkaSchemaManual" /> ${_('Manual')}
+        </label>
+        <label class="checkbox inline-block">
+          <input type="radio" name="kafkaSchemaManual" value="detect" data-bind="checked: createWizard.source.kafkaSchemaManual" /> ${_('Guess')}
+        </label>
+      </label>
+
+      <label class="control-label" data-bind="visible: createWizard.source.kafkaSchemaManual() == 'manual'">
+      ##<label class="control-label"><div>${ _('Encoding') }</div>
+      ##  <input type="text" class="input-xxlarge" data-bind="value: createWizard.source.kafkaFieldType">
+      ##</label>
+      <label class="control-label"><div>${ _('Type') }</div>
+        <select class="input-medium" data-bind="options: ['delimited', 'bitarray'], value: createWizard.source.kafkaFieldType"></select>
+      </label>
+      <label class="control-label"><div>${ _('Delimiter') }</div>
+        <input type="text" class="input-small" data-bind="value: createWizard.source.kafkaFieldDelimiter">
+      </label>
+
+      <br/>
+
+      <label class="control-label"><div>${ _('Field names') }</div>
+        <input type="text" class="input-xxlarge" data-bind="value: createWizard.source.kafkaFieldNames" placeholder="${ _('The list of fields to consume, e.g. orders,returns') }">
+      </label>
+      <label class="control-label"><div>${ _('Field types') }</div>
+        <input type="text" class="input-xxlarge" data-bind="value: createWizard.source.kafkaFieldTypes" placeholder="${ _('The list of field typs, e.g. string,int') }">
+      </label>
+
+      <div class="control-group" data-bind="visible: createWizard.source.hasStreamSelected">
+        <button class="btn" data-bind="click: createWizard.source.streamCheckConnection">
+          ${_('Test')}
+        </button>
+      </div>
+    </div>
+  </div>
+</script>

<script type="text/javascript">
  % if is_embeddable:
@@ -1888,6 +1903,9 @@ ${ assist.assistPanel() }
          % if ENABLE_SQOOP.get() or ENABLE_KAFKA.get():
          {'name': 'File', 'value': 'file'},
          % endif
+          % if ENABLE_KAFKA.get():
+          {'name': 'Stream', 'value': 'stream'},
+          % endif
          % if ENABLE_ALTUS.get():
          {'name': 'Altus SDX', 'value': 'altus'},
          % endif
@@ -1912,6 +1930,9 @@ ${ assist.assistPanel() }
          if (format.value === 'altus' && ['table'].indexOf(wizard.source.inputFormat()) === -1) {
            return false;
          }
+          if (format.value === 'stream' && ['file'].indexOf(wizard.source.inputFormat()) === -1) {
+            return false;
+          }
          if (format.value === 'hbase' && wizard.source.inputFormat() !== 'rdbms') {
            return false;
          }