
[spark] Support for Spark server API

Route to the corresponding API depending on the snippet type (e.g. hive, impala, spark scala...)
Romain Rigaux 11 years ago
parent
commit
5ef84f87ed

+ 70 - 12
apps/spark/src/spark/api.py

@@ -32,7 +32,7 @@ from beeswax.models import QUERY_TYPES, HiveServerQueryHandle, QueryHistory
 from beeswax.views import safe_get_design, save_design
 from beeswax.server import dbms
 
-from spark.job_server_api import get_api
+from spark.job_server_api import get_api as get_spark_api
 from spark.forms import SparkForm, QueryForm
 from desktop.lib.i18n import smart_str
 from spark.design import SparkDesign
@@ -44,10 +44,28 @@ from spark.decorators import json_error_handler
 LOG = logging.getLogger(__name__)
 
 
+def get_api(user, snippet):
+  if snippet['type'] == 'hive':
+    return HS2Api(user)
+  else:
+    return SparkApi(user)
+
+
+def _get_snippet_session(notebook, snippet):
+  return [session for session in notebook['sessions'] if session['type'] == snippet['type']][0] 
+
+
+
 class HS2Api():
   
   def __init__(self, user):
     self.user = user
+    
+  def create_session(self, lang):
+    return {
+        'type': lang,
+        'id': None # Real one at some point
+    }
   
   def execute(self, notebook, snippet):
     db = dbms.get(self.user)
@@ -115,24 +133,64 @@ class HS2Api():
     pass  
 
 
-class SparkApi(): # Pig, DBquery, Phoenix... 
+class SparkApi():  # Pig, DBquery, Phoenix... 
   
   def __init__(self, user):
-    pass
+    self.user = user
   
-  def execute(self):
-    pass
+  def create_session(self, lang='scala'):
+    api = get_spark_api(self.user)
+    return {
+        'type': lang,
+        'id': api.create_session(lang=lang)
+    } 
+  
+  def execute(self, notebook, snippet):    
+    api = get_spark_api(self.user)
+    session = _get_snippet_session(notebook, snippet)
+    
+    return {'id': api.submit_statement(session['id'], snippet['statement']).split('cells/')[1]}
 
-  def check_status(self):
-    pass
+  def check_status(self, notebook, snippet):
+    return {'status': 'finished'}
 
-  def fetch_result(self):
-    pass
+  def fetch_result(self, notebook, snippet):
+    api = get_spark_api(self.user)
+    session = _get_snippet_session(notebook, snippet)
+    cell = snippet['result']['handle']['id']  
+    
+    data = api.fetch_data(session['id'], cell)
+      
+    return {
+        'data': [data['output']],
+        'meta': [{
+          'name': column.name,
+          'type': column.type,
+          'comment': column.comment
+        } for column in []]
+    }
 
   def cancel(self):
     pass
 
 
+
+def create_session(request):
+  response = {'status': -1}
+
+  notebook = json.loads(request.POST.get('notebook', '{}'))
+  snippet = json.loads(request.POST.get('snippet', '{}'))
+
+  try:
+    response['session'] = get_api(request.user, snippet).create_session(lang=snippet['type'])
+    response['status'] = 0
+  except Exception, e:
+    raise PopupException(e, title=_('Error while accessing query server'))
+    response['error'] = force_unicode(str(e))
+
+  return HttpResponse(json.dumps(response), mimetype="application/json")
+
+
 def execute(request):
   response = {'status': -1}
 
@@ -140,7 +198,7 @@ def execute(request):
   snippet = json.loads(request.POST.get('snippet', '{}'))
 
   try:
-    response['handle'] = HS2Api(request.user).execute(notebook, snippet)
+    response['handle'] = get_api(request.user, snippet).execute(notebook, snippet)
     response['status'] = 0
   except Exception, e:
     raise PopupException(e, title=_('Error while accessing query server'))
@@ -156,7 +214,7 @@ def check_status(request):
   snippet = json.loads(request.POST.get('snippet', '{}'))
 
   try:
-    response['query_status'] = HS2Api(request.user).check_status(notebook, snippet)
+    response['query_status'] = get_api(request.user, snippet).check_status(notebook, snippet)
     response['status'] = 0
   except Exception, e:
     raise PopupException(e, title=_('Error while accessing query server'))
@@ -172,7 +230,7 @@ def fetch_result(request):
   snippet = json.loads(request.POST.get('snippet', '{}'))
 
   try:
-    response['result'] = HS2Api(request.user).fetch_result(notebook, snippet)
+    response['result'] = get_api(request.user, snippet).fetch_result(notebook, snippet)
     response['status'] = 0
   except Exception, e:
     raise PopupException(e, title=_('Error while accessing query server'))
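
A minimal sketch (not part of the commit) of how the new dispatcher is meant to be driven: get_api() returns HS2Api for 'hive' snippets and SparkApi for everything else, and the notebook carries one session per snippet type. The username, session id and statement below are hypothetical, and the dict shapes follow the notebook/snippet JSON the views deserialize.

# Hedged sketch: the Hue username and the session id are made up; a real
# session id has to come from create_session() first.
from django.contrib.auth.models import User
from spark.api import get_api

user = User.objects.get(username='hue')  # hypothetical user

notebook = {'sessions': [{'type': 'scala', 'id': 'some-session-uuid'}], 'snippets': []}
snippet = {'type': 'scala', 'statement': '1 + 2', 'result': {}}

api = get_api(user, snippet)              # -> SparkApi, since type != 'hive'
handle = api.execute(notebook, snippet)   # {'id': '<cell id on the Spark server>'}
snippet['result']['handle'] = handle
result = api.fetch_result(notebook, snippet)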

+ 1 - 1
apps/spark/src/spark/conf.py

@@ -26,7 +26,7 @@ from spark.settings import NICE_NAME
 JOB_SERVER_URL = Config(
   key="server_url",
   help=_t("URL of the Spark Job Server."),
-  default="http://localhost:8090/"
+  default="http://localhost:8080/"
 )
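
For context, a short hedged sketch of how the setting is typically read; JOB_SERVER_URL is the Config object defined above, and the hue.ini section name is an assumption based on Hue's usual per-app config layout, not something this commit shows.

# Hedged sketch, not part of the commit: Config.get() returns the value set
# in hue.ini (assumed to live under a [spark] section as server_url=...)
# or falls back to the new default shown above.
from spark.conf import JOB_SERVER_URL

def job_server_root():
  # e.g. "http://localhost:8080/" when nothing is overridden
  return JOB_SERVER_URL.get()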
 
 

+ 29 - 22
apps/spark/src/spark/job_server_api.py

@@ -16,6 +16,7 @@
 # limitations under the License.
 
 import logging
+import json
 import posixpath
 import threading
 
@@ -80,32 +81,38 @@ class JobServerApi(object):
     else:
       self._thread_local.user = user
 
-  def get_status(self, **kwargs):
-    return self._root.get('healthz', params=kwargs, headers={'Accept': _TEXT_CONTENT_TYPE})
+  def create_session(self, **kwargs):
+    return self._root.post('sessions', params=kwargs)
 
-  def submit_job(self, appName, classPath, data, context=None, sync=False):
-    params = {'appName': appName, 'classPath': classPath, 'sync': sync}
-    if context:
-      params['context'] = context
-    return self._root.post('jobs' % params, params=params, data=data, contenttype=_BINARY_CONTENT_TYPE)
+  def submit_statement(self, uuid, statement):
+    data = {'statement': statement}
+    return self._root.post('sessions/%s' % uuid, data=json.dumps(data), contenttype=_JSON_CONTENT_TYPE)
 
-  def job(self, job_id):
-    return self._root.get('jobs/%s' % job_id, headers={'Accept': _JSON_CONTENT_TYPE})
+  def fetch_data(self, session, cell):
+    return self._root.get('sessions/%s/cells/%s' % (session, cell))
 
-  def jobs(self, **kwargs):
-    return self._root.get('jobs', params=kwargs, headers={'Accept': _JSON_CONTENT_TYPE})
+#curl http://localhost:8080/sessions/87576bf4-f22c-4681-8f33-d3a329577ec9/cells/0
+#{"id":0,"state":"COMPLETE","input":["1+2"],"output":["res0: Int = 3"],"error":[]}
 
-  def create_context(self, name, **kwargs):
-    return self._root.post('contexts/%s' % name, params=kwargs, contenttype=_BINARY_CONTENT_TYPE)
-
-  def contexts(self, **kwargs):
-    return self._root.get('contexts', params=kwargs, headers={'Accept': _JSON_CONTENT_TYPE})
-
-  def delete_context(self, name, **kwargs):
-    return self._root.delete('contexts/%s' % name)
-
-  def upload_jar(self, app_name, data):
-    return self._root.post('jars/%s' % app_name, data=data, contenttype=_BINARY_CONTENT_TYPE)
+#  def job(self, job_id):
+#    return self._root.get('jobs/%s' % job_id, headers={'Accept': _JSON_CONTENT_TYPE})
+#
+#  def jobs(self, **kwargs):
+#    return self._root.get('jobs', params=kwargs, headers={'Accept': _JSON_CONTENT_TYPE})
+#
+#  def create_context(self, name, **kwargs):
+#    return self._root.post('contexts/%s' % name, params=kwargs, contenttype=_BINARY_CONTENT_TYPE)
+#
+#  def contexts(self, **kwargs):
+#    return self._root.get('contexts', params=kwargs, headers={'Accept': _JSON_CONTENT_TYPE})
+#
+#  def delete_context(self, name, **kwargs):
+#    return self._root.delete('contexts/%s' % name)
+#
+#  def upload_jar(self, app_name, data):
+#    return self._root.post('jars/%s' % app_name, data=data, contenttype=_BINARY_CONTENT_TYPE)
 
   def jars(self, **kwargs):
     return self._root.get('jars', params=kwargs, headers={'Accept': _JSON_CONTENT_TYPE})
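
The commented curl line above hints at the server contract the new methods wrap. Below is a hedged, standalone sketch of that round trip with plain requests; the URL, payload shape and response fields are inferred from this commit (the curl comment and the Location-header handling in resource.py further down), not from documented API behavior.

# Hedged sketch of the session workflow wrapped by create_session(),
# submit_statement() and fetch_data().
import json
import requests

BASE = 'http://localhost:8080'

# create_session(): POST /sessions; the session id comes back in the
# Location header (see the resource.py change at the end of this commit)
resp = requests.post('%s/sessions' % BASE)
session_id = resp.headers['location'].split('sessions/')[1]

# submit_statement(): POST a JSON body with the statement to the session
requests.post('%s/sessions/%s' % (BASE, session_id),
              data=json.dumps({'statement': '1 + 2'}),
              headers={'Content-Type': 'application/json'})

# fetch_data(): read the cell back, e.g.
# {"id": 0, "state": "COMPLETE", "input": ["1 + 2"], "output": ["res0: Int = 3"], "error": []}
cell = requests.get('%s/sessions/%s/cells/0' % (BASE, session_id)).json()
print(cell['output'])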

+ 25 - 3
apps/spark/src/spark/templates/editor.mako

@@ -23,13 +23,32 @@ ${ commonheader(_('Query'), app_name, user, "100px") | n,unicode }
 
 <div class="card card-toolbar">
   <div style="float: left; margin-left: 20px">
-    <div class="toolbar-label">${_('WIDGETS')}</div>
+    <div class="toolbar-label">${_('SPARK')}</div>
     <div data-bind="css: { 'draggable-widget': true, 'disabled': false }"
-         title="${_('Hive Query')}" rel="tooltip" data-placement="bottom">
+         title="${_('Spark Scala')}" rel="tooltip" data-placement="bottom">
+         <a data-bind="style: { cursor: true ? 'move' : 'default' }">
+           <img src="/spark/static/art/icon_spark_48.png" class="app-icon" />
+         </a>
+    </div>
+    <div data-bind="css: { 'draggable-widget': true, 'disabled': false }"
+         title="${_('Spark Scala')}" rel="tooltip" data-placement="bottom">
          <a data-bind="style: { cursor: true ? 'move' : 'default' }">
-                       <img src="/beeswax/static/art/icon_beeswax_48.png" class="app-icon" />
+           <img src="/spark/static/art/icon_spark_48.png" class="app-icon" />
          </a>
     </div>
+    <div data-bind="css: { 'draggable-widget': true, 'disabled': false }"
+         title="${_('Spark Scala')}" rel="tooltip" data-placement="bottom">
+         <a data-bind="style: { cursor: true ? 'move' : 'default' }">
+           <img src="/spark/static/art/icon_spark_48.png" class="app-icon" />
+         </a>
+    </div>    
+    <div class="toolbar-label">${_('SPARK')}</div>
+    <div data-bind="css: { 'draggable-widget': true, 'disabled': false }"
+         title="${_('Hive Query')}" rel="tooltip" data-placement="bottom">
+         <a data-bind="style: { cursor: true ? 'move' : 'default' }">
+           <img src="/beeswax/static/art/icon_beeswax_48.png" class="app-icon" />
+         </a>
+    </div>    
   </div>
   <div class="clearfix"></div>
 </div>
@@ -67,6 +86,8 @@ ${ commonheader(_('Query'), app_name, user, "100px") | n,unicode }
     <a href="javascript: void(0)" data-bind="click: newSnippet">
       <i class="fa fa-plus" title="${ _('Add') }"></i> ${ _('Add a new snippet') }
     </a>
+    <select data-bind="options: availableSnippets, value: selectedSnippet">
+    </select>    
   </div>
 </script>
 
@@ -78,6 +99,7 @@ ${ commonheader(_('Query'), app_name, user, "100px") | n,unicode }
 
     <div class="pull-right">
       <strong class="muted" data-bind="text: type"></strong>
+      <strong class="muted" data-bind="text: status"></strong>
     </div>
     <br/>
     <br/>

+ 1 - 0
apps/spark/src/spark/urls.py

@@ -36,6 +36,7 @@ urlpatterns = patterns('spark.views',
 
 # APIs
 urlpatterns += patterns('spark.api',
+  url(r'^api/create_session$', 'create_session', name='create_session'),
   url(r'^api/execute$', 'execute', name='execute'),
   url(r'^api/check_status', 'check_status', name='check_status'),
   url(r'^api/fetch_result$', 'fetch_result', name='fetch_result'),
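
A hedged sketch of the HTTP contract these URLs expose, mirroring the $.post calls in spark.vm.js below; Hue login and CSRF handling are omitted and the base URL is an assumption, so treat it as illustrative only.

# Hedged sketch: the endpoints expect form-encoded 'notebook' and 'snippet'
# fields containing JSON, and answer with {'status': 0, ...} on success.
import json
import requests

HUE = 'http://localhost:8888'  # assumed Hue base URL; authentication omitted

snippet = {'type': 'scala', 'statement': '1 + 2', 'result': {}}
notebook = {'sessions': [], 'snippets': [snippet]}

session = requests.post('%s/spark/api/create_session' % HUE, data={
    'notebook': json.dumps(notebook), 'snippet': json.dumps(snippet)}).json()['session']
notebook['sessions'].append(session)

handle = requests.post('%s/spark/api/execute' % HUE, data={
    'notebook': json.dumps(notebook), 'snippet': json.dumps(snippet)}).json()['handle']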

+ 1 - 1
apps/spark/src/spark/views.py

@@ -43,7 +43,7 @@ LOG = logging.getLogger(__name__)
 @view_error_handler
 def editor(request):
   return render('editor.mako', request, {
-      'notebooks_json': json.dumps([{'snippets': [{'type': 'table', 'result': {}}]}])
+      'notebooks_json': json.dumps([{'snippets': [{'type': 'scala', 'result': {}}]}])
   })
 
 @view_error_handler

+ 54 - 11
apps/spark/static/js/spark.vm.js

@@ -46,24 +46,44 @@ var Snippet = function (notebook, snippet) {
   var self = this;
   
   self.id = ko.observable(typeof snippet.id != "undefined" && snippet.id != null ? snippet.id : UUID());
-  self.type = ko.observable('hive-sql');
+  self.type = ko.observable(typeof snippet.type != "undefined" && snippet.type != null ? snippet.type : 'hive');
   self.statement = ko.observable('');
-  self.status = ko.observable('finished');
+  self.status = ko.observable('loading');
   self.klass = ko.computed(function(){
-    return 'results '+ self.type();
+    return 'results ' + self.type();
   });
   
   self.result = new Result(snippet, snippet.result);
   
   // init()
   // checkStatus()
-  
+
+  self.create_session = function() {
+    $.post("/spark/api/create_session", {
+        notebook: ko.mapping.toJSON(notebook),
+        snippet: ko.mapping.toJSON(self)
+      }, function (data) {
+        if (data.status == 0) {
+          notebook.sessions.push(ko.mapping.fromJS(data.session));
+          self.status('ready');
+        }
+        else {
+          $(document).trigger("error", data.message);
+        }
+    }).fail(function (xhr, textStatus, errorThrown) {
+      $(document).trigger("error", xhr.responseText);
+    });
+  };
   
   self.execute = function() {
 	$(".jHueNotify").hide();
 	logGA('/execute/' + self.type());	  
     
 	self.result.clear();
+	self.status('running');
     
     $.post("/spark/api/execute", {
         notebook: ko.mapping.toJSON(notebook),
@@ -74,7 +94,6 @@ var Snippet = function (notebook, snippet) {
         	 self.result.handle()[key] = val;
           });
 
-          self.status('running');
           self.checkStatus();
         }
         else {
@@ -124,12 +143,12 @@ var Snippet = function (notebook, snippet) {
  	    }
  	}).fail(function (xhr, textStatus, errorThrown) {
       $(document).trigger("error", xhr.responseText);
-     });
+    });
   };
 
   self.fetchResultMetadata = function() {
 	  
-  }
+  };
   
   self.cancel = function() {
 
@@ -143,20 +162,44 @@ var Notebook = function (vm, notebook) {
 
   self.id = ko.observable(typeof notebook.id != "undefined" && notebook.id != null ? notebook.id : UUID());
   self.snippets = ko.observableArray();
+  self.selectedSnippet = ko.observable('scala');
+  self.availableSnippets = ko.observableArray(['hive', 'scala', 'sql', 'python', 'pig', 'impala']); // presto, mysql, oracle, sqlite, postgres, phoenix
+  self.sessions = ko.observableArray(); // {'hive': ..., scala: ...}
 
+  self.getSession = function(session_type) {
+    var _s = null;
+    $.each(self.sessions(), function (index, s) {
+      if (s.type() == session_type) {
+        _s = s;
+        return false;
+      }
+    });
+    return _s;
+  };
+  
   self.addSnippet = function(snippet) {
-	  self.snippets.push(new Snippet(self, snippet));
+	var _snippet = new Snippet(self, snippet);
+	self.snippets.push(_snippet);
+	
+	if (self.getSession(self.selectedSnippet()) == null) {
+	  _snippet.create_session();
+    }	
   }  
 
   self.newSnippet = function() {
-    self.snippets.push(new Snippet(self, {}));
+	var snippet = new Snippet(self, {type: self.selectedSnippet()});	  
+	self.snippets.push(snippet);
+	  
+	if (self.getSession(self.selectedSnippet()) == null) {
+	  snippet.create_session();
+	}
   }  
   
   if (notebook.snippets) {
     $.each(notebook.snippets, function(index, snippet) {
       self.addSnippet(snippet);
     });
-  }
+  }  
 }
 
 
@@ -196,7 +239,7 @@ function EditorViewModel(notebooks) {
   }
 
   self.newNotebook = function() {
-	  self.notebooks.push(new Notebook(self, {}));
+	self.notebooks.push(new Notebook(self, {}));
     self.selectedNotebook(self.notebooks()[self.notebooks().length - 1]);
   }
   

+ 3 - 0
desktop/core/src/desktop/lib/rest/resource.py

@@ -50,6 +50,9 @@ class Resource(object):
     Decide whether the body should be a json dict or string
     """
 
+    if resp.headers.get('location') and resp.headers.get('location').startswith('http://localhost:8080/'):
+      return resp.headers.get('location').split('sessions/')[1]
+
     if len(resp.content) != 0 and resp.headers.get('content-type') and \
           'application/json' in resp.headers.get('content-type'):
       try:
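
The new branch above short-circuits JSON parsing when a response carries a Location header, which is how the job server reports a freshly created session (see the curl comment in job_server_api.py). A hedged illustration of the string handling it adds, using the session id from that comment; note the hardcoded http://localhost:8080/ check ties this hack to the default server_url changed earlier in the commit.

# Hedged illustration of the Location-header parsing added above; the header
# value is modeled on the curl comment earlier in this commit.
location = 'http://localhost:8080/sessions/87576bf4-f22c-4681-8f33-d3a329577ec9'
session_id = location.split('sessions/')[1]
assert session_id == '87576bf4-f22c-4681-8f33-d3a329577ec9'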