
HUE-2637 [spark] Close Spark session when closing the notebook

Jenny Kim authored 10 years ago · commit 3712e91e7e

apps/spark/src/spark/api.py (+9, -5)

@@ -194,17 +194,19 @@ def open_notebook(request):
 @require_POST
 @check_document_access_permission()
 def close_notebook(request):
-  response = {'status': -1}
+  response = {'status': -1, 'result': []}
 
   notebook = json.loads(request.POST.get('notebook', '{}'))
 
-  response['status'] = 0
   for snippet in notebook['snippets']:
     try:
-      if snippet['result']['handle']:
-        get_api(request.user, snippet).close(snippet)
+      response['result'].append(get_api(request.user, snippet).close(notebook, snippet))
     except QueryExpired:
       pass
+    except Exception as e:
+      LOG.exception('Error closing session: %s' % e)
+
+  response['status'] = 0
  response['message'] = _('Notebook closed!')
 
   return JsonResponse(response)
@@ -219,9 +221,11 @@ def close_statement(request):
   snippet = json.loads(request.POST.get('snippet', '{}'))
 
   try:
-    response['result'] = get_api(request.user, snippet).close(snippet)
+    response['result'] = get_api(request.user, snippet).close(notebook, snippet)
   except QueryExpired:
     pass
+
   response['status'] = 0
+  response['message'] = _('Statement closed!')
 
   return JsonResponse(response)
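
For reference, the new response shape can be exercised with a minimal client sketch. This is an illustration only: the host/port, the requests-based client, and the omission of Hue's authentication and CSRF handling are all assumptions, not part of this commit (in Hue the request is issued from spark.ko.js, shown further down).

    import json
    import requests

    def close_notebook(http_session, notebook):
        # Hypothetical client for POST /spark/api/notebook/close.
        resp = http_session.post(
            'http://localhost:8888/spark/api/notebook/close',
            data={'notebook': json.dumps(notebook)})
        resp.raise_for_status()
        payload = resp.json()
        # After this commit the response carries one entry per snippet, e.g.
        # {'status': 0, 'result': [{'session': 7, 'status': 'closed'}],
        #  'message': 'Notebook closed!'}
        return payload['result']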

apps/spark/src/spark/job_server_api.py (+4, -1)

@@ -104,6 +104,9 @@ class JobServerApi(object):
   def cancel(self, session):
     return self._root.post('sessions/%s/interrupt' % session)
 
+  def close(self, uuid):
+    return self._root.delete('sessions/%s' % uuid)
+
   def get_batches(self):
     return self._root.get('batches')
 
@@ -131,5 +134,5 @@ class JobServerApi(object):
 
     return '\n'.join(response['log'])
 
-  def delete_batch(self, uuid):
+  def close_batch(self, uuid):
     return self._root.delete('batches/%s' % uuid)
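
Both methods are thin wrappers over the job server's REST interface: closing a session or a batch is a plain DELETE against the corresponding resource, and the rename from delete_batch to close_batch keeps the wrapper's vocabulary aligned with the new close(). A rough standalone equivalent, assuming a Livy-style job server on localhost:8998:

    import requests

    BASE_URL = 'http://localhost:8998'  # assumed job server address

    def close_session(uuid):
        # Mirrors JobServerApi.close(): DELETE /sessions/<uuid>
        return requests.delete('%s/sessions/%s' % (BASE_URL, uuid))

    def close_batch(uuid):
        # Mirrors the renamed JobServerApi.close_batch(): DELETE /batches/<uuid>
        return requests.delete('%s/batches/%s' % (BASE_URL, uuid))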

apps/spark/src/spark/models.py (+25, -2)

@@ -407,8 +407,18 @@ class SparkApi():
   def _progress(self, snippet, logs):
     return 50
 
-  def close(self, snippet):
-    pass
+  def close(self, notebook, snippet):
+    api = get_spark_api(self.user)
+    session = _get_snippet_session(notebook, snippet)
+
+    if session['id'] is not None:
+      api.close(session['id'])
+      return {
+        'session': session['id'],
+        'status': 'closed'
+      }
+    else:
+      return {'status': 'skipped'}
 
   def _get_jobs(self, log):
     return []
@@ -460,6 +470,19 @@ class SparkBatchApi():
 
     return api.get_batch_log(snippet['result']['handle']['id'], startFrom=startFrom, size=size)
 
+  def close(self, notebook, snippet):
+    api = get_spark_api(self.user)
+
+    session_id = snippet['result']['handle']['id']
+    if session_id is not None:
+      api.close_batch(session_id)
+      return {
+        'session': session_id,
+        'status': 'closed'
+      }
+    else:
+      return {'status': 'skipped'}
+
   def _progress(self, snippet, logs):
     return 50
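
SparkApi.close() leans on _get_snippet_session() to map a snippet back to the session it runs in; that helper is not part of this diff. A simplified sketch of the lookup, with the notebook and session shapes inferred from the calls above:

    def _get_snippet_session(notebook, snippet):
        # Assumed shapes, inferred from this diff:
        #   notebook['sessions'] -> [{'type': 'spark', 'id': 7}, ...]
        #   snippet['type']      -> 'spark'
        matches = [s for s in notebook['sessions'] if s['type'] == snippet['type']]
        return matches[0] if matches else {'id': None}

The {'id': None} fallback lines up with the guard in SparkApi.close(), which answers {'status': 'skipped'} instead of issuing a DELETE when a snippet has no live session.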
 

apps/spark/src/spark/static/spark/js/spark.ko.js (+3, -5)

@@ -646,11 +646,9 @@ var Notebook = function (vm, notebook) {
   };
 
   self.close = function () {
-    if (self.id() != null) {
-      $.post("/spark/api/notebook/close", {
-        "notebook": ko.mapping.toJSON(self)
-      });
-    }
+    $.post("/spark/api/notebook/close", {
+      "notebook": ko.mapping.toJSON(self)
+    });
   };
 
   self.clearResults = function () {