Browse source code

[spark] Add a Notebook example with Python, Scala and SQL

Romain Rigaux 10 years ago
parent
commit
ab7c87e

+ 6 - 6
apps/spark/src/spark/api.py

@@ -147,14 +147,14 @@ def save_notebook(request):
 
   if notebook.get('id'):
     notebook_doc = Document2.objects.get(id=notebook['id'])
-  else:      
+  else:
     notebook_doc = Document2.objects.create(name=notebook['name'], type='notebook', owner=request.user)
     Document.objects.link(notebook_doc, owner=notebook_doc.owner, name=notebook_doc.name, description=notebook_doc.description, extra='notebook')
 
   notebook_doc.update_data(notebook)
   notebook_doc.name = notebook['name']
   notebook_doc.save()
-  
+
   response['status'] = 0
   response['id'] = notebook_doc.id
   response['message'] = _('Notebook saved !')
@@ -168,7 +168,7 @@ def open_notebook(request):
 
   notebook_id = request.GET.get('notebook')
   notebook = Notebook(document=Document2.objects.get(id=notebook_id))
-  
+
   response['status'] = 0
   response['notebook'] = notebook.get_json()
   response['message'] = _('Notebook saved !')
@@ -181,11 +181,11 @@ def close_notebook(request):
   response = {'status': -1}
 
   notebook = json.loads(request.POST.get('notebook', '{}'))
-  
+
   response['status'] = 0
   for snippet in notebook['snippets']:
     try:
-      if snippet['result']['handle']:      
+      if snippet['result']['handle']:
         get_api(request.user, snippet).close(snippet)
     except QueryExpired:
       pass
@@ -201,7 +201,7 @@ def close_statement(request):
   notebook = json.loads(request.POST.get('notebook', '{}'))
   snippet = json.loads(request.POST.get('snippet', '{}'))
 
-  try:    
+  try:
     response['result'] = get_api(request.user, snippet).close(snippet)
   except QueryExpired:
     pass
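
The api.py hunks above are whitespace cleanup only, so the endpoints keep their existing contract: the notebook arrives as a JSON-encoded POST field (as close_notebook and close_statement show) and the response is a JSON dict carrying a status code. Below is a minimal sketch of driving save_notebook with Django's test client; the /spark/api/notebook/save URL and the assumption that save_notebook reads a 'notebook' POST field the same way close_notebook does are illustrative guesses, not taken from this diff.

import json
from django.test import Client

c = Client()
c.login(username='test', password='test')  # Hue views require an authenticated user

notebook = {'name': 'My Notebook', 'snippets': []}
resp = c.post('/spark/api/notebook/save',  # assumed URL, not shown in this diff
              {'notebook': json.dumps(notebook)})
# Expected shape on success: {"status": 0, "id": <new Document2 id>, "message": "Notebook saved !"}
print(resp.content)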

File diff suppressed because it is too large
+ 2 - 2
apps/spark/src/spark/fixtures/initial_spark_examples.json


+ 41 - 41
apps/spark/src/spark/models.py

@@ -52,30 +52,30 @@ class QueryError(Exception):
 
 
 class Notebook():
-  
+
   def __init__(self, document=None):
     self.document = None
-    
+
     if document is not None:
       self.data = document.data
       self.document = document
-    else:    
+    else:
       self.data = json.dumps({
-          'name': 'My Notebook', 
+          'name': 'My Notebook',
           'snippets': []
       })
 
   def get_json(self):
     _data = self.get_data()
-    
+
     return json.dumps(_data)
- 
+
   def get_data(self):
     _data = json.loads(self.data)
-  
+
     if self.document is not None:
-      _data['id'] = self.document.id 
-  
+      _data['id'] = self.document.id
+
     return _data
 
 
@@ -83,26 +83,26 @@ def get_api(user, snippet):
   if snippet['type'] in ('hive', 'impala', 'spark-sql'):
     return HS2Api(user)
   elif snippet['type'] == 'text':
-    return TextApi(user)  
+    return TextApi(user)
   else:
     return SparkApi(user)
 
 
 def _get_snippet_session(notebook, snippet):
-  return [session for session in notebook['sessions'] if session['type'] == snippet['type']][0] 
+  return [session for session in notebook['sessions'] if session['type'] == snippet['type']][0]
 
 
 class TextApi():
-  
+
   def __init__(self, user):
     self.user = user
-    
+
   def create_session(self, lang):
     return {
         'type': lang,
         'id': None
     }
-  
+
 
 # HS2
 
@@ -116,18 +116,18 @@ def query_error_handler(func):
         raise QueryExpired(e)
       else:
         raise QueryError(message)
-  return decorator  
-  
+  return decorator
+
 
 class HS2Api():
-  
+
   def __init__(self, user):
     self.user = user
-    
+
   def _get_handle(self, snippet):
     snippet['result']['handle']['secret'], snippet['result']['handle']['guid'] = HiveServerQueryHandle.get_decoded(snippet['result']['handle']['secret'], snippet['result']['handle']['guid'])
     return HiveServerQueryHandle(**snippet['result']['handle'])
-    
+
   def _get_db(self, snippet):
     if snippet['type'] == 'hive':
       name = 'beeswax'
@@ -135,22 +135,22 @@ class HS2Api():
       name = 'impala'
     else:
       name = 'spark-sql'
-      
+
     return dbms.get(self.user, query_server=get_query_server_config(name=name))
-    
+
   def create_session(self, lang):
     return {
         'type': lang,
         'id': None # Real one at some point
     }
-  
+
   def execute(self, notebook, snippet):
     db = self._get_db(snippet)
     query = hql_query(snippet['statement'], QUERY_TYPES[0])
-    
+
     try:
       handle = db.client.query(query)
-    except QueryServerException, ex:      
+    except QueryServerException, ex:
       raise QueryError(ex.message)
 
     # All good
@@ -162,13 +162,13 @@ class HS2Api():
         'has_result_set': handle.has_result_set,
         'modified_row_count': handle.modified_row_count,
         'log_context': handle.log_context
-    }    
+    }
 
   @query_error_handler
   def check_status(self, notebook, snippet):
     response = {}
     db = self._get_db(snippet)
-      
+
     handle = self._get_handle(snippet)
     operation =  db.get_operation_status(handle)
     status = HiveServerQueryHistory.STATE_MAP[operation.operationState]
@@ -177,16 +177,16 @@ class HS2Api():
       raise QueryError(operation.errorMessage)
 
     response['status'] = 'running' if status.index in (QueryHistory.STATE.running.index, QueryHistory.STATE.submitted.index) else 'available'
-    
+
     return response
 
   @query_error_handler
   def fetch_result(self, notebook, snippet, rows, start_over):
     db = self._get_db(snippet)
-      
+
     handle = self._get_handle(snippet)
     results = db.fetch(handle, start_over=start_over, rows=rows)
-    
+
     # No escaping...
     return {
         'has_more': results.has_more,
@@ -201,7 +201,7 @@ class HS2Api():
 
   @query_error_handler
   def fetch_result_metadata(self):
-    pass 
+    pass
 
   @query_error_handler
   def cancel(self, notebook, snippet):
@@ -209,35 +209,35 @@ class HS2Api():
 
     handle = self._get_handle(snippet)
     db.cancel_operation(handle)
-    return {'status': 'canceled'}    
+    return {'status': 'canceled'}
 
   @query_error_handler
   def get_log(self, snippet):
     db = self._get_db(snippet)
-      
-    handle = self._get_handle(snippet)    
+
+    handle = self._get_handle(snippet)
     return db.get_log(handle)
-  
+
   def download(self, notebook, snippet, format):
     try:
       db = self._get_db(snippet)
-      handle = self._get_handle(snippet)  
+      handle = self._get_handle(snippet)
       return data_export.download(handle, format, db)
     except Exception, e:
       if not hasattr(e, 'message') or not e.message:
         message = e
       else:
         message = e.message
-      raise PopupException(message, detail='')  
-  
+      raise PopupException(message, detail='')
+
   def _progress(self, snippet, logs):
     if snippet['type'] == 'hive':
       match = re.search('Total jobs = (\d+)', logs, re.MULTILINE)
       total = (int(match.group(1)) if match else 1) * 2
-      
+
       started = logs.count('Starting Job')
       ended = logs.count('Ended Job')
-      
+
       return int((started + ended) * 100 / total)
     elif snippet['type'] == 'impala':
       match = re.search('(\d+)% Complete', logs, re.MULTILINE)
@@ -346,7 +346,7 @@ class SparkApi():
       except KeyError:
         data = [[data['text/plain']]]
         meta = [{'name': 'Header', 'type': 'STRING_TYPE', 'comment': ''}]
-        type = 'text'        
+        type = 'text'
       else:
         data = table['data']
         headers = table['headers']
@@ -375,7 +375,7 @@ class SparkApi():
         msg = ''.join(tb)
 
       raise QueryError(msg)
-    
+
   def download(self, notebook, snippet, format):
     try:
       api = get_spark_api(self.user)
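
For orientation, the Notebook wrapper and the get_api() dispatcher edited above fit together roughly as follows. This is a sketch built only from the code visible in this diff; the user object and the snippet dict are placeholder assumptions, not part of the commit.

# Build an empty notebook: Notebook() serialises {'name': 'My Notebook', 'snippets': []}.
notebook = Notebook()
data = notebook.get_data()            # dict form; an 'id' key is added only when backed by a Document2

# Pick the backend for a snippet: hive/impala/spark-sql -> HS2Api, text -> TextApi, anything else -> SparkApi.
snippet = {'type': 'hive', 'statement': 'SELECT 1', 'result': {}}   # placeholder snippet
api = get_api(request.user, snippet)  # request.user stands in for any authenticated Django user

# HS2Api.execute() submits the statement and returns the handle fields
# ('secret', 'guid', 'has_result_set', ...) that later calls decode via _get_handle().
handle = api.execute(data, snippet)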

+ 2 - 2
apps/spark/src/spark/views.py

@@ -37,7 +37,7 @@ LOG = logging.getLogger(__name__)
 @check_document_access_permission()
 def editor(request):
   notebook_id = request.GET.get('notebook')
-  
+
   if notebook_id:
     notebook = Notebook(document=Document2.objects.get(id=notebook_id)) # Todo perms
   else:
@@ -67,7 +67,7 @@ def editor(request):
 
 def new(request):
   return editor(request)
-  
+
 
 def notebooks(request):
   notebooks = [d.content_object.to_dict() for d in Document.objects.get_docs(request.user, Document2, extra='notebook')]

+ 2 - 0
desktop/core/src/desktop/models.py

@@ -339,6 +339,8 @@ class DocumentManager(models.Manager):
               extra = 'coordinator2'
             elif job.type == 'oozie-bundle2':
               extra = 'bundle2'
+            elif job.type == 'notebook':
+              extra = 'notebook'
             else:
               extra = ''
             doc = Document.objects.link(job, owner=job.owner, name=job.name, description=job.description, extra=extra)
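
The two lines added above tag converted notebook jobs with extra='notebook' when DocumentManager links them, which is what lets the spark app list only its own documents. A small sketch of that filter, mirroring the notebooks() view in views.py above:

# List notebook documents for the current user (same query as spark/views.py notebooks()).
docs = Document.objects.get_docs(request.user, Document2, extra='notebook')
notebooks = [d.content_object.to_dict() for d in docs]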

Some files were not shown because too many files changed in this diff