
HUE-4431 [editor] Batch mode does not always update the query

We need to convert the current query into the history notebook and continue with that copy.
Otherwise we would keep using the content of the notebook from the last time the user saved it.

This changes the flow to: create the history, then execute it, then update it
(previously it was: execute, then create the history).
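
In condensed form, the reordered execute() flow looks roughly like the sketch below. This is not a runnable standalone snippet: _historify, Notebook and get_api are the existing helpers from notebook/api.py shown in the diff, and error handling plus the sync-result bookkeeping are omitted.

    import json

    def execute(request):
        notebook = json.loads(request.POST.get('notebook', '{}'))
        snippet = json.loads(request.POST.get('snippet', '{}'))
        is_query = notebook['type'].startswith('query-')

        if is_query:
            # 1. Create the history document first and continue with its content,
            #    so the query that runs is the one that gets recorded.
            history = _historify(notebook, request.user)
            notebook = Notebook(document=history).get_data()

        # 2. Execute against the (possibly historified) notebook.
        handle = get_api(request, snippet).execute(notebook, snippet)

        if is_query:
            # 3. Update the already-created history with the execution outcome.
            _snippet = [s for s in notebook['snippets'] if s['id'] == snippet['id']][0]
            _snippet['result']['handle'] = handle
            history.update_data(notebook)
            history.save()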
Romain Rigaux 9 years ago
Parent
Commit
9182f6bb43

+ 9 - 2
desktop/libs/notebook/src/notebook/api.py

@@ -102,19 +102,25 @@ def close_session(request):
 def execute(request):
   response = {'status': -1}
   result = None
+  history = None
 
   notebook = json.loads(request.POST.get('notebook', '{}'))
   snippet = json.loads(request.POST.get('snippet', '{}'))
+  is_query = notebook['type'].startswith('query-')
 
   try:
     try:
+      if is_query:
+        history = _historify(notebook, request.user)
+        notebook = Notebook(document=history).get_data()
+
       response['handle'] = get_api(request, snippet).execute(notebook, snippet)
 
       # Retrieve and remove the result from the handle
       if response['handle'].get('sync'):
         result = response['handle'].pop('result')
     finally:
-      if notebook['type'].startswith('query-'):
+      if is_query:
         _snippet = [s for s in notebook['snippets'] if s['id'] == snippet['id']][0]
         if 'handle' in response: # No failure
           _snippet['result']['handle'] = response['handle']
@@ -124,7 +130,8 @@ def execute(request):
         else:
           _snippet['status'] = 'failed'
 
-        history = _historify(notebook, request.user)
+        history.update_data(notebook)
+        history.save()
 
         response['history_id'] = history.id
         response['history_uuid'] = history.uuid

+ 4 - 0
desktop/libs/notebook/src/notebook/connectors/tests/tests_hiveserver2.py

@@ -467,6 +467,7 @@ class TestHiveserver2ApiWithHadoop(BeeswaxSampleProvider):
               "id": "ca11fcb1-11a5-f534-8200-050c8e1e57e3"
           },
           "statement": "%(statement)s",
+          "statement_raw": "%(statement)s",
           "type": "hive",
           "properties": {
               "files": [],
@@ -499,6 +500,7 @@ class TestHiveserver2ApiWithHadoop(BeeswaxSampleProvider):
                 "id": "ca11fcb1-11a5-f534-8200-050c8e1e57e3"
             },
             "statement": "%(statement)s",
+            "statement_raw": "%(statement)s",
             "type": "hive",
             "properties": {
                 "files": [],
@@ -529,6 +531,7 @@ class TestHiveserver2ApiWithHadoop(BeeswaxSampleProvider):
               "id": "ca11fcb1-11a5-f534-8200-050c8e1e57e3"
           },
           "statement": "%(statement)s",
+          "statement_raw": "%(statement)s",
           "type": "hive",
           "properties": {
               "files": [],
@@ -567,6 +570,7 @@ class TestHiveserver2ApiWithHadoop(BeeswaxSampleProvider):
               "id": "ca11fcb1-11a5-f534-8200-050c8e1e57e3"
           },
           "statement": "%(statement)s",
+          "statement_raw": "%(statement)s",
           "type": "hive",
           "properties": {
               "files": [],