
[spark] Provide a list of job names and links

Romain Rigaux, 10 years ago
parent · commit 8a3d70d
2 files changed, 11 insertions and 1 deletion
  1. apps/spark/src/spark/api.py  (+5, -0)
  2. apps/spark/src/spark/models.py  (+6, -1)

apps/spark/src/spark/api.py  (+5, -0)

@@ -19,6 +19,7 @@ import json
 import logging
 
 from django.http import HttpResponse
+from django.core.urlresolvers import reverse
 from django.utils.translation import ugettext as _
 
 from desktop.lib.exceptions_renderable import PopupException
@@ -122,6 +123,10 @@ def get_logs(request):
   db = get_api(request.user, snippet)
   response['logs'] = db.get_log(snippet)
   response['progress'] = db._progress(snippet, response['logs']) if snippet['status'] != 'available' else 100
+  response['job_urls'] = [{
+      'name': job,
+      'url': reverse('jobbrowser.views.single_job', kwargs={'job': job})
+    } for job in db._get_jobs(response['logs'])]
   response['status'] = 0
 
   return HttpResponse(json.dumps(response), mimetype="application/json")
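
For context, a minimal sketch of the payload get_logs would return with this change applied; the job ID and the /jobbrowser/jobs/<job-id> URL shape are illustrative assumptions, not values taken from the commit:

```python
# Illustrative response from the Spark app's get_logs endpoint after this change.
# The job ID and the URL below are assumed examples; the real URL is built with
# reverse('jobbrowser.views.single_job', kwargs={'job': job}).
response = {
    'logs': '... Starting Job = job_1428919846845_0002 ...',
    'progress': 42,  # forced to 100 once the snippet status is 'available'
    'job_urls': [
        {'name': 'job_1428919846845_0002',
         'url': '/jobbrowser/jobs/job_1428919846845_0002'},
    ],
    'status': 0,
}
```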

apps/spark/src/spark/models.py  (+6, -1)

@@ -28,7 +28,7 @@ from beeswax import conf as beeswax_conf
 from beeswax.models import QUERY_TYPES, HiveServerQueryHandle, QueryHistory, HiveServerQueryHistory
 from beeswax.server import dbms
 from beeswax.server.dbms import get_query_server_config, QueryServerException
-from beeswax.views import safe_get_design, save_design
+from beeswax.views import safe_get_design, save_design, _parse_out_hadoop_jobs
 
 from spark.job_server_api import get_api as get_spark_api
 
@@ -257,6 +257,9 @@ class HS2Api():
     else:
       return {'status': 'skipped'}
 
+  def _get_jobs(self, log):
+    return _parse_out_hadoop_jobs(log)
+
 
 # Spark
 
@@ -343,3 +346,5 @@ class SparkApi():
   def close(self, snippet):
     pass
 
+  def _get_jobs(self, log):
+    return []
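
The extraction itself is delegated to beeswax.views._parse_out_hadoop_jobs, which is not shown in this diff. A rough, self-contained sketch of that kind of log scraping, assuming MapReduce job IDs of the form job_<cluster-timestamp>_<sequence>:

```python
import re

# Rough approximation only; the real parser is beeswax.views._parse_out_hadoop_jobs.
# Assumes MapReduce job IDs of the form job_<cluster-timestamp>_<sequence>.
JOB_ID_RE = re.compile(r'\b(job_\d+_\d+)\b')

def parse_out_hadoop_jobs(log):
    """Return the distinct Hadoop job IDs mentioned in a log string, in order of appearance."""
    seen = []
    for job_id in JOB_ID_RE.findall(log or ''):
        if job_id not in seen:
            seen.append(job_id)
    return seen

# Example:
#   parse_out_hadoop_jobs('Starting Job = job_1428919846845_0002, Tracking URL = ...')
#   -> ['job_1428919846845_0002']
```

SparkApi._get_jobs returns an empty list for now, so only Hive (HS2Api) snippets get job links from this commit.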