
[notebook] Add support for Hive UDF in the properties panel

Note there are a bunch of TODOs still:
- integrate with new binding
- support FILE ARCHIVE
- protect for index out of bounds (see the sketch below)

Will continue when the new binding is up
Romain Rigaux 10 years ago
parent commit 25468c0
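
A minimal sketch of where those TODOs could go, assuming the raw property strings keep the 'key=value', 'path' and 'name class' shapes used in this commit. The helper names and the extension-to-type mapping below are hypothetical and not part of the change:

# Hypothetical helpers sketching the TODOs above; not part of this commit.

def parse_settings(settings):
  # Skip entries without '=' instead of letting the [1] index raise IndexError.
  parsed = []
  for s in settings:
    if '=' in s:
      key, value = s.rsplit('=', 1)
      parsed.append({'key': key, 'value': value})
  return parsed

def parse_file_resources(file_resources):
  # "support FILE ARCHIVE": pick the resource type from the extension instead of
  # hard-coding JAR; this mapping is an assumption, not Hue's actual rule.
  parsed = []
  for path in file_resources:
    if path.endswith('.jar'):
      resource_type = 'JAR'
    elif path.endswith(('.zip', '.tar.gz', '.tgz')):
      resource_type = 'ARCHIVE'
    else:
      resource_type = 'FILE'
    parsed.append({'type': resource_type, 'path': path})
  return parsed

def parse_functions(functions):
  # Guard the 'name class' split so a lone token does not raise IndexError.
  parsed = []
  for f in functions:
    parts = f.rsplit(' ', 1)
    if len(parts) == 2:
      parsed.append({'name': parts[0], 'class_name': parts[1]})
  return parsed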

+ 1 - 1
desktop/core/src/desktop/api2.py

@@ -59,7 +59,7 @@ def api_error_handler(func):
 
 
 @api_error_handler
-def get_documents(request): # TODO only here for assist
+def get_documents(request): # TODO only here for not breaking assist for now
   filters = {
       'owner': request.user
   }

+ 7 - 0
desktop/libs/notebook/src/notebook/connectors/hiveserver2.py

@@ -123,6 +123,13 @@ class HS2Api(Api):
     file_resources = snippet['properties'].get('files', None)
     functions = snippet['properties'].get('functions', None)
 
+    if settings:
+      settings = [{'key': s.rsplit('=', 1)[0], 'value': s.rsplit('=', 1)[1]} for s in settings] # TODO integrate with new binding
+    if file_resources:
+      file_resources = [{'type': 'JAR', 'path': f} for f in file_resources] # TODO support FILE ARCHIVE
+    if functions:
+      functions = [{'name': f.rsplit(' ', 1)[0], 'class_name': f.rsplit(' ', 1)[1]} for f in functions] # TODO protect for index out of bounds
+
     query = hql_query(statement, query_type=QUERY_TYPES[0], settings=settings, file_resources=file_resources, functions=functions)
 
     try:
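
For context, the settings, file_resources and functions structures handed to hql_query above are the pieces Hive expects as prologue statements in front of the query (SET, ADD JAR, CREATE TEMPORARY FUNCTION). The mapping below is only an illustration of that correspondence, not the actual beeswax implementation, and the example values in the comments are made up:

# Illustration only: how the parsed properties correspond to Hive statements.
def build_prologue(settings, file_resources, functions):
  statements = []
  for s in settings:            # e.g. {'key': 'hive.execution.engine', 'value': 'mr'}
    statements.append('SET %s=%s;' % (s['key'], s['value']))
  for r in file_resources:      # e.g. {'type': 'JAR', 'path': '/user/hue/udfs.jar'}
    statements.append('ADD %s %s;' % (r['type'], r['path']))
  for f in functions:           # e.g. {'name': 'myUpper', 'class_name': 'org.hue.udf.MyUpper'}
    statements.append("CREATE TEMPORARY FUNCTION %s AS '%s';" % (f['name'], f['class_name']))
  return '\n'.join(statements)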

+ 4 - 1
desktop/libs/notebook/src/notebook/static/notebook/js/notebook.ko.js

@@ -127,11 +127,14 @@
       properties['py_file'] = '';
       properties['arguments'] = [];
     }
-    else if (snippetType == 'hive' || snippetType == 'impala') {
+    else if (snippetType == 'hive') {
       properties['settings'] = [];
       properties['files'] = [];
       properties['functions'] = [];
     }
+    else if (snippetType == 'impala') {
+      properties['settings'] = [];
+    }
     else if (snippetType == 'pig') {
       properties['parameters'] = [];
       properties['hadoopProperties'] = [];
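
For reference, the defaults above now differ per dialect: Hive snippets get settings, files and functions, while Impala only gets settings. Written as a Python literal purely for illustration (the real object lives in notebook.ko.js):

# Illustration of the client-side defaults after this change.
DEFAULT_SNIPPET_PROPERTIES = {
  'hive': {'settings': [], 'files': [], 'functions': []},   # UDFs configurable for Hive
  'impala': {'settings': []},                                # no files/functions for Impala yet
}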

+ 2 - 1
desktop/libs/notebook/src/notebook/templates/editor_components.mako

@@ -566,7 +566,9 @@ ${ require.config() }
         <!-- ko template: { if: typeof properties().numExecutors != 'undefined', name: 'property', data: { type: 'number', label: '${ _ko('Executors') }', value: properties().numExecutors, title: '${ _ko('Number of executors to launch (Default: 2)') }' }} --><!-- /ko -->
         <!-- ko template: { if: typeof properties().queue != 'undefined', name: 'property', data: { type: 'string', label: '${ _ko('Queue') }', value: properties().queue, title: '${ _ko('The YARN queue to submit to (Default: default)') }' }} --><!-- /ko -->
         <!-- ko template: { if: typeof properties().archives != 'undefined', name: 'property', data: { type: 'csv-hdfs-files', label: '${ _ko('Archives') }', value: properties().archives, title: '${ _ko('Archives to be extracted into the working directory of each executor (YARN only)') }', placeholder: '${ _ko('e.g. file.zip') }'}} --><!-- /ko -->
+
         <!-- ko template: { if: typeof properties().files != 'undefined', name: 'property', data: { type: 'csv-hdfs-files', label: '${ _ko('Files') }', value: properties().files, title: '${ _ko('Files to be placed in the working directory of each executor.') }', placeholder: '${ _ko('e.g. file.data') }'}} --><!-- /ko -->
+        <!-- ko template: { if: typeof properties().functions != 'undefined', name: 'property', data: { type: 'csv', label: '${ _ko('Functions') }', value: properties().functions, title: '${ _ko('UDFs name and class') }', placeholder: '${ _ko('e.g. myUpper org.hue.udf.MyUpper') }'}} --><!-- /ko -->
         <!-- ko template: { if: typeof properties().settings != 'undefined', name: 'property', data: { type: 'csv', label: '${ _ko('Settings') }', value: properties().settings, title: '${ _ko('Spark properties') }', placeholder: '${ _ko('e.g. foo=value') }'}} --><!-- /ko -->
 
         <!-- ko template: { if: typeof properties().parameters != 'undefined', name: 'property', data: { type: 'csv', label: '${ _ko('Parameters') }', value: properties().parameters, title: '${ _ko('Names and values of Pig parameters and options') }', placeholder: '${ _ko('e.g. input /user/data, -param input=/user/data, -optimizer_off SplitFilter, -verbose') }'}} --><!-- /ko -->
@@ -1023,7 +1025,6 @@ ${ require.config() }
                   <i class="fa fa-external-link"></i>
                 </a>
               </div>
-
               <!-- /ko -->
               <!-- ko if: ['pyspark', 'spark'].indexOf(type()) != -1 && typeof properties != 'undefined' -->
               <div style="display:block; width:100%;">
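
Since the UDF class has to be shipped to the cluster before it can be registered, the new Functions field is meant to be used together with the Files field. A hypothetical snippet properties payload combining both (the jar path is made up; the function entry matches the placeholder in the template above):

# Hypothetical example payload sent by the editor for a Hive snippet.
snippet_properties = {
  'files': ['/user/hue/udfs/hue-udfs.jar'],        # becomes {'type': 'JAR', 'path': ...} server-side
  'functions': ['myUpper org.hue.udf.MyUpper'],    # split into {'name': ..., 'class_name': ...}
  'settings': ['hive.execution.engine=mr'],        # split into {'key': ..., 'value': ...}
}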

+ 17 - 15
desktop/libs/notebook/src/notebook/views.py

@@ -107,38 +107,40 @@ def browse(request, database, table):
 
   editor = Notebook()
   editor.data = json.dumps({
-    'description':'',
-    'sessions':[
+    'description': '',
+    'sessions': [
       {
-         'type':'hive',
-         'properties':[
+         'type': 'hive',
+         'properties': [
 
          ],
-         'id':None
+         'id': None
       }
     ],
-    'selectedSnippet':'hive',
+    'selectedSnippet': 'hive',
     'type': 'query-%s' % editor_type,
 
-    'snippets':[
+    'snippets': [
       {
-         'status':'ready-execute',
-         'id':'e8b323b3-88ef-3a84-6264-af11fa5fbefb',
+         'status': 'ready-execute',
+         'id': 'e8b323b3-88ef-3a84-6264-af11fa5fbefb',
          'statement_raw': sql_select,
          'statement': sql_select,
          'type': editor_type,
-         'properties':{
-            'files':[
+         'properties': {
+            'files': [
             ],
-            'settings':[
+            'functions': [
+            ],
+            'settings': [
             ]
          },
          'name': 'Browse',
-         'database':'default',
-         'result':{  }
+         'database': 'default',
+         'result': {}
       }
     ],
-    'name':'Browse'
+    'name': 'Browse'
   })
 
   return render('editor.mako', request, {