
HUE-8747 [editor] Fix task run as batch.

jdesjean committed 6 years ago
parent commit 029e7faa4e

+ 1 - 1
desktop/libs/notebook/src/notebook/api.py

@@ -322,7 +322,7 @@ def get_logs(request):
   jobs = db.get_jobs(notebook, snippet, full_log)
 
   response['logs'] = logs.strip()
-  response['progress'] = min(db.progress(snippet, full_log), 99) if snippet['status'] != 'available' and snippet['status'] != 'success' else 100
+  response['progress'] = min(db.progress(notebook, snippet, logs=full_log), 99) if snippet['status'] != 'available' and snippet['status'] != 'success' else 100
   response['jobs'] = jobs
   response['isFullLogs'] = isinstance(db, (OozieApi, DataEngApi))
   response['status'] = 0
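
Note: the call site now threads notebook through and passes the log text as a keyword, matching the updated connector signature in base.py below. For reference, a sketch of the clamping logic with the same names (log-derived estimates are held at 99% until the snippet status reports a finished query):

    if snippet['status'] in ('available', 'success'):
        response['progress'] = 100
    else:
        response['progress'] = min(db.progress(notebook, snippet, logs=full_log), 99)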

+ 0 - 4
desktop/libs/notebook/src/notebook/connectors/altus_adb.py

@@ -69,10 +69,6 @@ class AltusAdbApi(Api):
     return '...'
 
 
-  def progress(self, snippet, logs):
-    return 50
-
-
   def get_jobs(self, notebook, snippet, logs):
     return []
 

+ 1 - 1
desktop/libs/notebook/src/notebook/connectors/base.py

@@ -466,7 +466,7 @@ class Api(object):
   def autocomplete(self, snippet, database=None, table=None, column=None, nested=None):
     return {}
 
-  def progress(self, snippet, logs=None):
+  def progress(self, notebook, snippet, logs=None):
     return 50
 
   def get_jobs(self, notebook, snippet, logs):
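
Note: the base Api.progress() gains the notebook parameter and keeps its 50% fallback. The identical return-50 overrides deleted from altus_adb.py above, and from dataeng.py, jdbc.py, spark_batch.py and spark_shell.py below, now simply fall through to this default. A minimal sketch (the connector name is a stand-in):

    class SomeConnectorApi(Api):
        pass  # no override: progress(notebook, snippet, logs=None) returns 50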

+ 0 - 4
desktop/libs/notebook/src/notebook/connectors/dataeng.py

@@ -117,10 +117,6 @@ class DataEngApi(Api):
     return ''
 
 
-  def progress(self, snippet, logs):
-    return 50
-
-
   def get_jobs(self, notebook, snippet, logs):
     ## 50cf0e00-746b-4d86-b8e3-f2722296df71
     job_id = snippet['result']['handle']['id']

+ 2 - 2
desktop/libs/notebook/src/notebook/connectors/hiveserver2.py

@@ -390,7 +390,7 @@ class HS2Api(Api):
 
 
   @query_error_handler
-  def progress(self, snippet, logs):
+  def progress(self, notebook, snippet, logs=''):
     if snippet['type'] == 'hive':
       match = re.search('Total jobs = (\d+)', logs, re.MULTILINE)
       total = int(match.group(1)) if match else 1
@@ -424,7 +424,7 @@ class HS2Api(Api):
       } for job in jobs_with_state]
     elif snippet['type'] == 'impala' and ENABLE_QUERY_BROWSER.get():
       query_id = unpack_guid_base64(snippet['result']['handle']['guid'])
-      progress = min(self.progress(snippet, logs), 99) if snippet['status'] != 'available' and snippet['status'] != 'success' else 100
+      progress = min(self.progress(notebook, snippet, logs), 99) if snippet['status'] != 'available' and snippet['status'] != 'success' else 100
       jobs = [{
         'name': query_id,
         'url': '/hue/jobbrowser#!id=%s' % query_id,
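
Note: logs now defaults to '' so the regex scans stay safe when no log text has arrived yet. A sketch of how the two branches turn log text into a percentage (the exact counting here is an assumption; the updated tests below pin the expected values):

    import re

    def sketch_progress(snippet, logs=''):
        if snippet['type'] == 'hive':
            match = re.search(r'Total jobs = (\d+)', logs, re.MULTILINE)
            total = int(match.group(1)) if match else 1
            started = logs.count('Starting Job')  # launches seen so far
            ended = logs.count('Ended Job')       # completions seen so far
            return max(int((started + ended) * 100 / (total * 2)), 5)
        elif snippet['type'] == 'impala':
            matches = re.findall(r'(\d+)% Complete', logs, re.MULTILINE)
            return int(matches[-1]) if matches else 0
        return 50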

+ 0 - 3
desktop/libs/notebook/src/notebook/connectors/jdbc.py

@@ -124,9 +124,6 @@ class JdbcApi(Api):
   def cancel(self, notebook, snippet):
     return {'status': 0}
 
-  def progress(self, snippet, logs):
-    return 50
-
   @query_error_handler
   def close_statement(self, notebook, snippet):
     return {'status': -1}

+ 2 - 2
desktop/libs/notebook/src/notebook/connectors/oozie_batch.py

@@ -140,7 +140,7 @@ class OozieApi(Api):
     return logs if logs else oozie_job.log
 
 
-  def progress(self, snippet, logs):
+  def progress(self, notebook, snippet, logs=None):
     job_id = snippet['result']['handle']['id']
 
     oozie_job = check_job_access_permission(self.request, job_id)
@@ -174,7 +174,7 @@ class OozieApi(Api):
 
   def _get_log_output(self, oozie_workflow):
     log_output = ''
-    q = QueryDict(self.request.GET, mutable=True)
+    q = self.request.GET.copy()
     q['format'] = 'python'  # Hack for triggering the good section in single_task_attempt_logs
     self.request.GET = q
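
Note: the second hunk also fixes the mutable-copy idiom. QueryDict's constructor parses a raw query string, so handing it an existing QueryDict does not carry the parameters over; .copy() is Django's documented way to get a mutable duplicate. A minimal sketch:

    from django.http import QueryDict

    q = QueryDict('format=json')  # request.GET-style QueryDicts are immutable
    q2 = q.copy()                 # mutable copy, parameters intact
    q2['format'] = 'python'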
 

+ 0 - 3
desktop/libs/notebook/src/notebook/connectors/spark_batch.py

@@ -94,6 +94,3 @@ class SparkBatchApi(Api):
   def cancel(self, notebook, snippet):
     # Batch jobs do not support interruption, so close statement instead.
     return self.close_statement(snippet)
-
-  def progress(self, snippet, logs):
-    return 50

+ 0 - 3
desktop/libs/notebook/src/notebook/connectors/spark_shell.py

@@ -340,9 +340,6 @@ class SparkApi(Api):
 
     return api.get_log(session['id'], startFrom=startFrom, size=size)
 
-  def progress(self, snippet, logs):
-    return 50
-
   def close_statement(self, notebook, snippet): # Individual statements cannot be closed
     pass
 

+ 4 - 4
desktop/libs/notebook/src/notebook/connectors/tests/tests_hiveserver2.py

@@ -281,7 +281,7 @@ class TestHiveserver2Api(object):
         INFO  : The url to track the job: http://jennykim-1.vpc.cloudera.com:8088/proxy/application_1466104358744_0003/
     """
 
-    assert_equal(self.api.progress(snippet, logs), 5)
+    assert_equal(self.api.progress({}, snippet, logs=logs), 5)
 
     logs += """INFO  : Starting Job = job_1466104358744_0003, Tracking URL = http://jennykim-1.vpc.cloudera.com:8088/proxy/application_1466104358744_0003/
         INFO  : Kill Command = /usr/lib/hadoop/bin/hadoop job  -kill job_1466104358744_0003
@@ -293,7 +293,7 @@ class TestHiveserver2Api(object):
         INFO  : Ended Job = job_1466104358744_0003
     """
 
-    assert_equal(self.api.progress(snippet, logs), 50)
+    assert_equal(self.api.progress({}, snippet, logs=logs), 50)
 
     snippet = json.loads("""
         {
@@ -322,7 +322,7 @@ class TestHiveserver2Api(object):
 
     logs = "Query 734a81444c85be66:d05f3bb1a6c2d0a5: 0% Complete (1 out of 4693)"
 
-    assert_equal(self.api.progress(snippet, logs), 0)
+    assert_equal(self.api.progress({}, snippet, logs=logs), 0)
 
     logs += """Query 734a81444c85be66:d05f3bb1a6c2d0a5: 20% Complete (4 out of 4693)
 
@@ -333,7 +333,7 @@ class TestHiveserver2Api(object):
     Query 734a81444c85be66:d05f3bb1a6c2d0a5: 50% Complete (234 out of 4693)
     """
 
-    assert_equal(self.api.progress(snippet, logs), 50)
+    assert_equal(self.api.progress({}, snippet, logs=logs), 50)
 
 
   def test_get_jobs(self):
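
Note: the tests pass an empty dict for the new notebook argument; that works because the Hive and Impala branches above only read snippet and logs. Callers that do need notebook context, such as the task-server path in tasks.py below, receive the real object.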

+ 21 - 3
desktop/libs/notebook/src/notebook/tasks.py

@@ -34,6 +34,7 @@ from desktop.auth.backend import rewrite_user
 from desktop.celery import app
 from desktop.conf import TASK_SERVER
 from desktop.lib import export_csvxls
+from desktop.lib import fsmanager
 
 from notebook.connectors.base import get_api, QueryExpired, ResultWrapper
 from notebook.sql_utils import get_current_statement
@@ -216,11 +217,25 @@ def get_jobs(notebook, snippet, logs, **kwargs): #Re implement to fetch updated
 
   request = _get_request(**kwargs)
   api = get_api(request, snippet)
-  #insidious problem where each call in the hive api transforms the guid/secret to binary form. get_log does the transform, but not get_jobs. get_jobs is called after get_log so usually not an issue. Our get_log implementation doesn't
-  if hasattr(api, '_get_handle'): # This is specific to impala, should be handled in hiveserver2
-    api._get_handle(snippet)
   return api.get_jobs(notebook, snippet, logs)
 
+def progress(notebook, snippet, logs=None, **kwargs):
+  result = download_to_file.AsyncResult(notebook['uuid'])
+  state = result.state
+  if state == states.PENDING:
+    raise QueryExpired()
+  elif state == 'SUBMITTED' or states.state(state) < states.state('PROGRESS'):
+    return 1
+  elif state in states.EXCEPTION_STATES:
+    result.maybe_reraise()
+    return 1
+
+  info = result.info
+  snippet['result']['handle'] = info.get('handle', {}).copy()
+  request = _get_request(**kwargs)
+  api = get_api(request, snippet)
+  return api.progress(notebook, snippet, logs=logs)
+
 def fetch_result(notebook, snippet, rows, start_over, **kwargs):
   result = download_to_file.AsyncResult(notebook['uuid'])
   state = result.state
@@ -340,6 +355,9 @@ def _close_statement_async_id(notebook):
 def _get_request(postdict=None, user_id=None):
   request = HttpRequest()
   request.POST = postdict
+  request.fs_ref = 'default'
+  request.fs = fsmanager.get_filesystem(request.fs_ref)
+  request.jt = None
   user = User.objects.get(id=user_id)
   user = rewrite_user(user)
   request.user = user
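
Note: the task server gains its own progress() that first maps the Celery task state to a coarse percentage (PENDING means the result is gone, hence QueryExpired; anything before PROGRESS reports 1%) and only then delegates to the connector, with the handle recovered from result.info. The _get_request() additions attach a default filesystem via fsmanager so connectors that touch HDFS can run outside a real web request. A hypothetical call shape from the task server (postdict and user_id are the kwargs _get_request() expects):

    from notebook.tasks import progress

    pct = progress(notebook, snippet, logs=full_log,
                   postdict={}, user_id=request.user.id)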