
[jb] Support killing Spark jobs

Romain Rigaux, 10 years ago
commit bc4d33d

apps/jobbrowser/src/jobbrowser/api.py  (+1, -1)

@@ -268,7 +268,7 @@ class YarnApi(JobBrowserApi):
        return KilledYarnJob(self.resource_manager_api, job)

      if job.get('applicationType') == 'SPARK':
-        job = SparkJob(job)
+        job = SparkJob(job, self.resource_manager_api)
      elif job.get('applicationType') == 'MAPREDUCE':
        jobid = jobid.replace('application', 'job')

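The api.py side is purely a call-site change: when the YARN application type is SPARK, the wrapper is now built with the ResourceManager API client as well, so the resulting SparkJob can talk back to YARN instead of being a read-only view of the application. A minimal sketch of the intended use from the Job Browser side; the kill() helper shown here is an assumption for illustration, not part of this commit:

  # Sketch only: `app_json` stands in for the application dict returned by the
  # YARN ResourceManager; `kill()` is an assumed helper, not in this commit.
  job = SparkJob(app_json, self.resource_manager_api)
  job.kill()  # would ask the ResourceManager to kill the Spark application
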
apps/jobbrowser/src/jobbrowser/yarn_models.py  (+2, -2)

@@ -86,8 +86,8 @@ class Application(object):

class SparkJob(Application):

-  def __init__(self, job):
-    super(SparkJob, self).__init__(job)
+  def __init__(self, job, api=None):
+    super(SparkJob, self).__init__(job, api)
    self._scrape()

  def _history_application_metrics(self, html_doc):
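
The yarn_models.py half lets SparkJob accept the API handle (defaulting to None so existing call sites keep working) and forwards it to the Application base class. Below is a hedged sketch of how that handle could then be used to actually kill a Spark application; none of it is from this commit, and the attribute name self.api, the kill() method, and the kill(app_id)-style call on the ResourceManager client are all assumptions for illustration:

  # Sketch, not part of the commit: assumes the Application base class keeps
  # the handle (here as self.api) and exposes the application id as self.id,
  # and that the ResourceManager client has a kill(app_id)-style call.
  class SparkJob(Application):

    def __init__(self, job, api=None):
      super(SparkJob, self).__init__(job, api)
      self._scrape()

    def kill(self):
      # Ask the YARN ResourceManager to kill the underlying application.
      if self.api is not None:
        return self.api.kill(self.id)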