
HUE-2886 [spark] Show livy session process logs

Jenny Kim, 10 years ago
parent commit 4cab982

+ 1 - 1
apps/spark/src/spark/api.py

@@ -164,7 +164,7 @@ def get_logs(request):
   size = int(size) if size else None
 
   db = get_api(request.user, snippet)
-  response['logs'] = db.get_log(snippet, startFrom=startFrom, size=size)
+  response['logs'] = db.get_log(notebook, snippet, startFrom=startFrom, size=size)
   response['progress'] = db._progress(snippet, response['logs']) if snippet['status'] != 'available' and snippet['status'] != 'success' else 100
   response['job_urls'] = [{
       'name': job,
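
A hedged sketch, not part of the commit: the shapes of the notebook and snippet payloads that get_logs() now threads through to db.get_log(). Only 'sessions', 'type', 'status' and 'id' are implied by the surrounding code; the concrete values and any other fields are assumptions.

  # Hypothetical payloads, assuming the notebook tracks one session per snippet type:
  notebook = {
    'sessions': [
      {'type': 'spark', 'id': 42},  # Livy session id (assumed field names/values)
    ]
  }
  snippet = {'type': 'spark', 'status': 'running'}

  # With the new signature, the API can resolve the Livy session from the notebook:
  # logs = db.get_log(notebook, snippet, startFrom=0, size=100)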

+ 13 - 0
apps/spark/src/spark/job_server_api.py

@@ -83,6 +83,19 @@ class JobServerApi(object):
   def get_status(self):
     return self._root.get('sessions')
 
+  def get_log(self, uuid, startFrom=None, size=None):
+    params = {}
+
+    if startFrom is not None:
+      params['from'] = startFrom
+
+    if size is not None:
+      params['size'] = size
+
+    response = self._root.get('sessions/%s/log' % uuid, params=params)
+
+    return '\n'.join(response['log'])
+
   def create_session(self, **properties):
     properties['proxyUser'] = self.user
     return self._root.post('sessions', data=json.dumps(properties), contenttype='application/json')
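
A standalone sketch of what the new JobServerApi.get_log() does against Livy's REST API. The real method goes through Hue's Resource wrapper (self._root.get); the use of requests, the host URL and the helper name here are illustrative assumptions.

  import requests

  def fetch_session_log(livy_url, session_id, start_from=None, size=None):
    # Mirror JobServerApi.get_log: GET /sessions/<id>/log with optional paging.
    params = {}
    if start_from is not None:
      params['from'] = start_from  # line offset into the session log
    if size is not None:
      params['size'] = size        # maximum number of lines to return

    resp = requests.get('%s/sessions/%s/log' % (livy_url, session_id), params=params)
    resp.raise_for_status()
    # Livy returns JSON whose 'log' field is a list of log lines.
    return '\n'.join(resp.json()['log'])

  # Example (assuming a local Livy server):
  # print(fetch_session_log('http://localhost:8998', 0, start_from=0, size=100))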

+ 7 - 4
apps/spark/src/spark/models.py

@@ -229,7 +229,7 @@ class HS2Api(Api):
     return {'status': 0}
 
   @query_error_handler
-  def get_log(self, snippet, startFrom=None, size=None):
+  def get_log(self, notebook, snippet, startFrom=None, size=None):
     db = self._get_db(snippet)
 
     handle = self._get_handle(snippet)
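
Note that HS2Api only gains the notebook parameter for interface consistency; its body does not use it. Assuming the base Api class (defined elsewhere in models.py, not shown in this diff) declares the shared contract, the updated signature would read roughly:

  class Api(object):
    # Shared notebook-API contract (a sketch; the real base class is not
    # part of this diff).
    def get_log(self, notebook, snippet, startFrom=None, size=None):
      raise NotImplementedError()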
@@ -437,8 +437,11 @@ class SparkApi(Api):
 
     return {'status': 0}
 
-  def get_log(self, snippet, startFrom=0, size=None):
-    return 'Not available'
+  def get_log(self, notebook, snippet, startFrom=0, size=None):
+    api = get_spark_api(self.user)
+    session = _get_snippet_session(notebook, snippet)
+
+    return api.get_log(session['id'], startFrom=startFrom, size=size)
 
   def _progress(self, snippet, logs):
     return 50
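
_get_snippet_session() is defined elsewhere in models.py and is not part of this diff. Based purely on how it is called above, a plausible sketch (an assumption, not the actual implementation):

  def _get_snippet_session(notebook, snippet):
    # Pick the notebook session whose type matches the snippet, e.g. 'spark'.
    return [session for session in notebook['sessions']
            if session['type'] == snippet['type']][0]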
@@ -505,7 +508,7 @@ class SparkBatchApi(Api):
         'status': state,
     }
 
-  def get_log(self, snippet, startFrom=0, size=None):
+  def get_log(self, notebook, snippet, startFrom=0, size=None):
     api = get_spark_api(self.user)
 
     return api.get_batch_log(snippet['result']['handle']['id'], startFrom=startFrom, size=size)
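
get_batch_log() is also outside this diff. Assuming it mirrors the new session get_log() against Livy's batch endpoint (GET /batches/<id>/log), it would look roughly like this sketch, not the actual JobServerApi code:

  def get_batch_log(self, batch_id, startFrom=None, size=None):
    params = {}
    if startFrom is not None:
      params['from'] = startFrom
    if size is not None:
      params['size'] = size

    response = self._root.get('batches/%s/log' % batch_id, params=params)
    return '\n'.join(response['log'])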