
HUE-2962 [editor] Support session-based properties with snippet overrides

Jenny Kim, 9 years ago
commit 6648b21
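In short: snippet-level properties now take precedence, and any property left empty on the snippet falls back to the matching entry in the notebook's session. A minimal standalone sketch of that resolution order (names simplified from the patch below):

```python
def resolve_property(snippet, session, key):
    """Return the snippet's value for `key`, falling back to the session.

    Session properties are stored as a list of {'key': ..., 'value': ...}
    dicts, so the fallback scans the list for the matching key.
    """
    value = snippet['properties'].get(key)
    if not value:
        value = next((prop['value'] for prop in session['properties']
                      if prop['key'] == key), None)
    return value


snippet = {'properties': {'settings': []}}  # empty -> falls back to session
session = {'properties': [{'key': 'settings',
                           'value': [{'key': 'hive.execution.engine',
                                      'value': 'spark'}]}]}
assert resolve_property(snippet, session, 'settings')[0]['value'] == 'spark'
```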

+ 2 - 2
desktop/libs/notebook/src/notebook/connectors/base.py

@@ -164,6 +164,6 @@ class Api(object):
 
   def export_data_as_hdfs_file(self, snippet, target_file, overwrite): raise NotImplementedError()
 
-  def export_data_as_table(self, snippet, destination): raise NotImplementedError()
+  def export_data_as_table(self, notebook, snippet, destination): raise NotImplementedError()
 
-  def export_large_data_to_hdfs(self, snippet, destination): raise NotImplementedError()
+  def export_large_data_to_hdfs(self, notebook, snippet, destination): raise NotImplementedError()
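These base signatures now take the notebook as well as the snippet, so a connector's export path can consult the notebook's open sessions. A hedged sketch of a subclass override (`MyApi` and `_build_ctas` are hypothetical names, and the import assumes Hue's source tree is on the path):

```python
from notebook.connectors.base import Api

class MyApi(Api):  # hypothetical connector, for illustration only
    def export_data_as_table(self, notebook, snippet, destination):
        # The notebook travels with the snippet, so session-level
        # defaults are reachable from the export path as well.
        session = next((s for s in notebook['sessions']
                        if s['type'] == snippet['type']), None)
        hql = self._build_ctas(session, snippet, destination)  # hypothetical helper
        return hql, destination
```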

+ 34 - 11
desktop/libs/notebook/src/notebook/connectors/hiveserver2.py

@@ -183,8 +183,8 @@ class HS2Api(Api):
     db = self._get_db(snippet)
 
     statement = self._get_current_statement(db, snippet)
-
-    query = self._prepare_hql_query(snippet, statement.pop('statement'))
+    session = self._get_session(notebook, snippet['type'])
+    query = self._prepare_hql_query(snippet, statement.pop('statement'), session)
 
     try:
       db.use(query.database)
@@ -347,7 +347,8 @@ class HS2Api(Api):
   def explain(self, notebook, snippet):
     db = self._get_db(snippet)
     response = self._get_current_statement(db, snippet)
-    query = self._prepare_hql_query(snippet, response.pop('statement'))
+    session = self._get_session(notebook, snippet['type'])
+    query = self._prepare_hql_query(snippet, response.pop('statement'), session)
 
     explanation = db.explain(query)
 
@@ -369,11 +370,12 @@ class HS2Api(Api):
     return '/filebrowser/view=%s' % target_file
 
 
-  def export_data_as_table(self, snippet, destination):
+  def export_data_as_table(self, notebook, snippet, destination):
     db = self._get_db(snippet)
 
     response = self._get_current_statement(db, snippet)
-    query = self._prepare_hql_query(snippet, response.pop('statement'))
+    session = self._get_session(notebook, snippet['type'])
+    query = self._prepare_hql_query(snippet, response.pop('statement'), session)
 
     if not query.hql_query.strip().lower().startswith('select'):
       raise Exception(_('Only SELECT statements can be saved. Provided statement: %(query)s') % {'query': query.hql_query})
@@ -392,11 +394,12 @@ class HS2Api(Api):
     return hql, success_url
 
 
-  def export_large_data_to_hdfs(self, snippet, destination):
+  def export_large_data_to_hdfs(self, notebook, snippet, destination):
     db = self._get_db(snippet)
 
     response = self._get_current_statement(db, snippet)
-    query = self._prepare_hql_query(snippet, response.pop('statement'))
+    session = self._get_session(notebook, snippet['type'])
+    query = self._prepare_hql_query(snippet, response.pop('statement'), session)
 
     if not query.hql_query.strip().lower().startswith('select'):
       raise Exception(_('Only SELECT statements can be saved. Provided statement: %(query)s') % {'query': query.hql_query})
@@ -433,18 +436,27 @@ class HS2Api(Api):
     return upgraded_properties
 
 
+  def _get_session(self, notebook, type='hive'):
+    session = next((session for session in notebook['sessions'] if session['type'] == type), None)
+    return session
+
+
   def _get_hive_execution_engine(self, notebook, snippet):
     # Get hive.execution.engine from snippet properties, if none, then get from session
     properties = snippet['properties']
     settings = properties.get('settings', [])
 
     if not settings:
-      session = next((session for session in notebook['sessions'] if session['type'] == 'hive'), None)
+      session = self._get_session(notebook, 'hive')
       if not session:
         raise Exception(_('Cannot get jobs, failed to find active HS2 session for user: %s') % self.user.username)
-      settings = session['properties']
+      properties = session['properties']
+      settings = next((prop['value'] for prop in properties if prop['key'] == 'settings'), None)
 
-    engine = next((setting['value'] for setting in settings if setting['key'] == 'hive.execution.engine'), DEFAULT_HIVE_ENGINE)
+    if settings:
+      engine = next((setting['value'] for setting in settings if setting['key'] == 'hive.execution.engine'), DEFAULT_HIVE_ENGINE)
+    else:
+      engine = DEFAULT_HIVE_ENGINE
 
     return engine
 
@@ -499,10 +511,21 @@ class HS2Api(Api):
     return resp
 
 
-  def _prepare_hql_query(self, snippet, statement):
+  def _prepare_hql_query(self, snippet, statement, session):
     settings = snippet['properties'].get('settings', None)
     file_resources = snippet['properties'].get('files', None)
     functions = snippet['properties'].get('functions', None)
+    properties = session['properties']
+
+    if not settings:
+      settings = next((prop['value'] for prop in properties if prop['key'] == 'settings'), None)
+
+    if not file_resources:
+      file_resources = next((prop['value'] for prop in properties if prop['key'] == 'files'), None)
+
+    if not functions:
+      functions = next((prop['value'] for prop in properties if prop['key'] == 'functions'), None)
+
     database = snippet.get('database') or 'default'
 
     return hql_query(
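The reworked `_get_hive_execution_engine` resolves the engine in three steps: snippet settings first, then the session's 'settings' property, then `DEFAULT_HIVE_ENGINE`. A standalone sketch that mirrors that logic, with `DEFAULT_HIVE_ENGINE` stubbed to 'mr' here for illustration and the missing-session error handling omitted:

```python
DEFAULT_HIVE_ENGINE = 'mr'  # stand-in; the real constant is defined in hiveserver2.py

def get_engine(snippet, session):
    # Snippet settings win; otherwise fall back to the session's
    # 'settings' property; otherwise use the default engine.
    settings = snippet['properties'].get('settings', [])
    if not settings:
        settings = next((prop['value'] for prop in session['properties']
                         if prop['key'] == 'settings'), None)
    if settings:
        return next((s['value'] for s in settings
                     if s['key'] == 'hive.execution.engine'),
                    DEFAULT_HIVE_ENGINE)
    return DEFAULT_HIVE_ENGINE


snippet = {'properties': {'settings': []}}
session = {'properties': [{'key': 'settings',
                           'value': [{'key': 'hive.execution.engine',
                                      'value': 'spark'}]}]}
assert get_engine(snippet, session) == 'spark'
```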

+ 58 - 9
desktop/libs/notebook/src/notebook/connectors/tests/tests_hiveserver2.py

@@ -56,18 +56,12 @@ class TestHiveserver2Api(object):
             "status": "running",
             "database": "default",
             "properties": {
-                "files": [{
-                    "path": "/user/test/myudfs.jar",
-                    "type": "jar"
-                }],
+                "files": [],
                 "functions": [{
                     "class_name": "org.hue.udf.MyUpper",
                     "name": "myUpper"
                 }],
-                "settings": [{
-                    "value": "spark",
-                    "key": "hive.execution.engine"
-                }]
+                "settings": []
             },
             "result": {
                 "handle": {
@@ -89,9 +83,64 @@ class TestHiveserver2Api(object):
             "id": "9b50e364-f7b2-303d-e924-db8b0bd9866d"
         }
     """ % {'statement': statement}
+    session_json = """
+            {
+                "type": "hive",
+                "properties": [
+                    {
+                        "multiple": true,
+                        "value": [
+                            {
+                                "path": "/user/test/myudfs.jar",
+                                "type": "jar"
+                            }
+                        ],
+                        "nice_name": "Files",
+                        "key": "files",
+                        "help_text": "Add one or more files, jars, or archives to the list of resources.",
+                        "type": "hdfs-files"
+                    },
+                    {
+                        "multiple": true,
+                        "value": [
+                            {
+                                "class_name": "org.hue.udf.MyUpper",
+                                "name": "myUpper"
+                            }
+                        ],
+                        "nice_name": "Functions",
+                        "key": "functions",
+                        "help_text": "Add one or more registered UDFs (requires function name and fully-qualified class name).",
+                        "type": "functions"
+                    },
+                    {
+                        "multiple": true,
+                        "value": [
+                            {
+                                "value": "spark",
+                                "key": "hive.execution.engine"
+                            }
+                        ],
+                        "nice_name": "Settings",
+                        "key": "settings",
+                        "help_text": "Hive and Hadoop configuration properties.",
+                        "type": "settings",
+                        "options": [
+                            "hive.map.aggr",
+                            "hive.exec.compress.output",
+                            "hive.exec.parallel",
+                            "hive.execution.engine",
+                            "mapreduce.job.queuename"
+                        ]
+                    }
+                ],
+                "id": 30
+            }
+    """
 
     snippet = json.loads(snippet_json)
-    hql_query = self.api._prepare_hql_query(snippet, statement)
+    session = json.loads(session_json)
+    hql_query = self.api._prepare_hql_query(snippet, statement, session)
 
     assert_equal([{'key': 'hive.execution.engine', 'value': 'spark'}], hql_query.settings)
     assert_equal([{'type': 'jar', 'path': '/user/test/myudfs.jar'}], hql_query.file_resources)

+ 2 - 2
desktop/libs/notebook/src/notebook/views.py

@@ -142,10 +142,10 @@ def execute_and_watch(request):
   api = get_api(request, snippet)
 
   if action == 'save_as_table':
-    sql, success_url = api.export_data_as_table(snippet, destination)
+    sql, success_url = api.export_data_as_table(notebook, snippet, destination)
     editor = make_notebook(name='Execute and watch', editor_type=editor_type, statement=sql, status='ready-execute')
   elif action == 'insert_as_query':
-    sql, success_url = api.export_large_data_to_hdfs(snippet, destination)
+    sql, success_url = api.export_large_data_to_hdfs(notebook, snippet, destination)
     editor = make_notebook(name='Execute and watch', editor_type=editor_type, statement=sql, status='ready-execute')
   else:
     raise PopupException(_('Action %s is unknown') % action)