
HUE-2201 [jobbrowser] Add a kill YARN job button

Killed jobs in YARN don't have much information. We try to only pull
the basics for now (it used to just crash before).
Romain Rigaux 11 years ago
commit 800c389

+ 3 - 1
apps/jobbrowser/src/jobbrowser/api.py

@@ -29,7 +29,7 @@ import hadoop.yarn.node_manager_api as node_manager_api
 
 from jobbrowser.conf import SHARE_JOBS
 from jobbrowser.models import Job, JobLinkage, TaskList, Tracker
-from jobbrowser.yarn_models import Application, Job as YarnJob, Container
+from jobbrowser.yarn_models import Application, Job as YarnJob, KilledJob as KilledYarnJob, Container
 from hadoop.cluster import get_next_ha_mrcluster, get_next_ha_yarncluster
 from desktop.lib.exceptions_renderable import PopupException
 
@@ -260,6 +260,8 @@ class YarnApi(JobBrowserApi):
 
       if job['state'] == 'ACCEPTED':
         raise ApplicationNotRunning(jobid, job)
+      elif job['state'] == 'KILLED':
+        return KilledYarnJob(self.resource_manager_api, job)
 
       # MR id, assume 'applicationType': 'MAPREDUCE'
       jobid = jobid.replace('application', 'job')
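The new branch short-circuits killed applications before the MapReduce-specific lookups run, since a killed application can no longer be queried through the MR application master. A minimal sketch of that dispatch, with simplified names and without the surrounding error handling (not the full method):

    # Sketch only: simplified state dispatch in YarnApi.get_job
    def get_job(self, jobid):
        app_id = jobid.replace('job', 'application')
        job = self.resource_manager_api.app(app_id)['app']

        if job['state'] == 'ACCEPTED':
            raise ApplicationNotRunning(jobid, job)
        elif job['state'] == 'KILLED':
            # Killed apps expose almost no data, so wrap them in the lightweight KilledJob
            return KilledYarnJob(self.resource_manager_api, job)

        # Otherwise fall through to the full 'MAPREDUCE' job lookup
        jobid = jobid.replace('application', 'job')
        ...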

+ 19 - 15
apps/jobbrowser/src/jobbrowser/templates/jobs.mako

@@ -285,6 +285,7 @@ ${ components.menubar() }
 
     function callJsonData(callback, justRunning) {
       var _url = "?format=json";
+
       if (justRunning == undefined) {
         if ($(".btn-status.active").length > 0) {
           _url += "&state=" + $(".btn-status.active").data("value");
@@ -297,13 +298,17 @@ ${ components.menubar() }
         isUpdating = true;
         _url += "&state=running";
       }
+
       _url += "&user=" + $("#userFilter").val().trim();
+
       if ($("#textFilter").val().trim() != "") {
         _url += "&text=" + $("#textFilter").val().trim();
       }
+
       if ($("#showRetired").is(":checked")) {
         _url += "&retired=on";
       }
+
       $.getJSON(_url, callback);
     }
 
@@ -339,20 +344,19 @@ ${ components.menubar() }
       var _this = $(this);
       _this.attr("data-loading-text", _this.text() + " ...");
       _this.button("loading");
-      $.post(_this.data("killurl"),
-              {
-                "format": "json"
-              },
-              function (response) {
-                _this.button("reset");
-                $("#killModal").modal("hide");
-                if (response.status != 0) {
-                  $(document).trigger("error", "${ _('There was a problem killing this job.') }");
-                }
-                else {
-                  callJobDetails({ url: _this.data("url")});
-                }
-              }
+      $.post(_this.data("killurl"), {
+          "format": "json"
+        },
+        function (response) {
+          _this.button("reset");
+          $("#killModal").modal("hide");
+          if (response.status != 0) {
+            $(document).trigger("error", "${ _('There was a problem killing this job.') }");
+          }
+          else {
+            callJobDetails({ url: _this.data("url")});
+          }
+        }
       );
     });
 
@@ -364,7 +368,7 @@ ${ components.menubar() }
     callJsonData(populateTable);
 
     var _runningInterval = window.setInterval(function () {
-      if (!isUpdating) {
+      if (! isUpdating) {
         callJsonData(updateRunning, true);
       }
     }, 2000);

+ 14 - 1
apps/jobbrowser/src/jobbrowser/tests.py

@@ -453,6 +453,14 @@ class TestMapReduce2NoHadoop:
     assert_false(can_view_job('test2', response.context['job']))
     assert_false(can_modify_job('test2', response.context['job']))
 
+  def test_kill_job(self):
+    job_id = 'application_1356251510842_0054'
+    try:
+      response = self.c.post('/jobbrowser/jobs/%s/kill?format=json' % job_id)
+      assert_equal(json.loads(response.content), {"status": 0})
+    finally:
+      MockResourceManagerApi.APPS[job_id]['state'] = 'RUNNING'
+
 
 class MockResourceManagerApi:
   APPS = {
@@ -597,6 +605,11 @@ class MockMapreduce2Api(object):
        }
     }
 
+  def kill(self, job_id):
+    job_id = job_id.replace('job', 'application')
+    MockResourceManagerApi.APPS[job_id]['state'] = 'KILLED'
+    return {}
+
 
 class MockMapreduceApi(MockMapreduce2Api):
   def job(self, user, job_id):
@@ -606,7 +619,7 @@ class MockMapreduceApi(MockMapreduce2Api):
               u'reducesCompleted': 0, u'mapsRunning': 1, u'id': u'job_1356251510842_0054', u'successfulReduceAttempts': 0, u'successfulMapAttempts': 0,
               u'uberized': False, u'reducesTotal': 1, u'elapsedTime': 3426, u'mapsPending': 0, u'state': u'RUNNING', u'failedReduceAttempts': 0,
               u'mapsCompleted': 0, u'killedMapAttempts': 0, u'killedReduceAttempts': 0, u'runningReduceAttempts': 0, u'failedMapAttempts': 0, u'mapsTotal': 1,
-              u'user': u'romain', u'startTime': 1357152972886, u'reducesPending': 1, u'reduceProgress': 0.0, u'finishTime': 0,
+              u'user': u'test', u'startTime': 1357152972886, u'reducesPending': 1, u'reduceProgress': 0.0, u'finishTime': 0,
               u'name': u'select avg(salary) from sample_07(Stage-1)', u'reducesRunning': 0, u'newMapAttempts': 0, u'diagnostics': u'', u'mapProgress': 0.0,
               u'runningMapAttempts': 1, u'newReduceAttempts': 1,
               "acls" : [{

+ 7 - 7
apps/jobbrowser/src/jobbrowser/views.py

@@ -140,7 +140,7 @@ def massage_job_for_json(job, request):
     'finishTimeFormatted': hasattr(job, 'finishTimeFormatted') and job.finishTimeFormatted or '',
     'durationFormatted': hasattr(job, 'durationFormatted') and job.durationFormatted or '',
     'durationMs': hasattr(job, 'durationInMillis') and job.durationInMillis or '',
-    'canKill': (job.status.lower() == 'running' or job.status.lower() == 'pending') and not job.is_mr2 and (request.user.is_superuser or request.user.username == job.user or can_modify_job(request.user.username, job)),
+    'canKill': job.status.lower() in ('running', 'pending') and (request.user.is_superuser or request.user.username == job.user or can_modify_job(request.user.username, job)),
     'killUrl': job.jobId and reverse('jobbrowser.views.kill_job', kwargs={'job': job.jobId}) or ''
   }
   return job
@@ -194,17 +194,19 @@ def job_counters(request, job):
 @check_job_permission
 def kill_job(request, job):
   if request.method != "POST":
-    raise Exception(_("kill_job may only be invoked with a POST (got a %(method)s).") % dict(method=request.method))
+    raise Exception(_("kill_job may only be invoked with a POST (got a %(method)s).") % {'method': request.method})
 
   if job.user != request.user.username and not request.user.is_superuser:
     access_warn(request, _('Insufficient permission'))
-    raise MessageException(_("Permission denied.  User %(username)s cannot delete user %(user)s's job.") %
-                           dict(username=request.user.username, user=job.user))
+    raise MessageException(_("Permission denied.  User %(username)s cannot delete user %(user)s's job.") % {'username': request.user.username, 'user': job.user})
 
   job.kill()
+
   cur_time = time.time()
+  api = get_api(request.user, request.jt)
+
   while time.time() - cur_time < 15:
-    job = Job.from_id(jt=request.jt, jobid=job.jobId)
+    job = api.get_job(jobid=job.jobId)
 
     if job.status not in ["RUNNING", "QUEUED"]:
       if request.REQUEST.get("next"):
@@ -214,11 +216,9 @@ def kill_job(request, job):
       else:
         raise MessageException("Job Killed")
     time.sleep(1)
-    job = Job.from_id(jt=request.jt, jobid=job.jobId)
 
   raise Exception(_("Job did not appear as killed within 15 seconds."))
 
-
 @check_job_permission
 def job_attempt_logs(request, job, attempt_index=0):
   return render("job_attempt_logs.mako", request, {

+ 42 - 3
apps/jobbrowser/src/jobbrowser/yarn_models.py

@@ -72,7 +72,7 @@ class Application:
     setattr(self, 'durationFormatted', format_duration_in_millis(self.durationInMillis))
 
 
-class Job:
+class Job(object):
 
   def __init__(self, api, attrs):
     self.api = api
@@ -91,6 +91,9 @@ class Job:
     setattr(self, 'is_retired', False)
     setattr(self, 'maps_percent_complete', None)
     setattr(self, 'reduces_percent_complete', None)
+    if self.state in ('FINISHED', 'FAILED', 'KILLED'):
+      setattr(self, 'finishTime', self.finishedTime)
+      setattr(self, 'startTime', self.startedTime)
     setattr(self, 'duration', self.finishTime - self.startTime)
     setattr(self, 'finishTimeFormatted', format_unixtime_ms(self.finishTime))
     setattr(self, 'startTimeFormatted', format_unixtime_ms(self.startTime))
@@ -102,6 +105,9 @@ class Job:
     if not hasattr(self, 'acls'):
       setattr(self, 'acls', {})
 
+  def kill(self):
+    return self.api.kill(self.id)
+
   @property
   def counters(self):
     counters = self.api.counters(self.id)
@@ -126,8 +132,8 @@ class Job:
 
   def filter_tasks(self, task_types=None, task_states=None, task_text=None):
     return [Task(self, task) for task in self.api.tasks(self.id).get('tasks', {}).get('task', [])
-            if (not task_types or task['type'].lower() in task_types) and
-               (not task_states or task['state'].lower() in task_states)]
+          if (not task_types or task['type'].lower() in task_types) and
+             (not task_states or task['state'].lower() in task_states)]
 
   @property
   def job_attempts(self):
@@ -136,6 +142,39 @@ class Job:
     return self._job_attempts
 
 
+class KilledJob(Job):
+
+  def __init__(self, api, attrs):
+    self._fixup()
+
+    super(KilledJob, self).__init__(api, attrs)
+    super(KilledJob, self)._fixup()
+
+    setattr(self, 'jobId_short', self.jobId.replace('application_', ''))
+
+  def _fixup(self):
+    if not hasattr(self, 'mapsCompleted'):
+      setattr(self, 'mapsCompleted', 1)
+    if not hasattr(self, 'reducesCompleted'):
+      setattr(self, 'reducesCompleted', 1)
+
+
+  @property
+  def counters(self):
+    return {}
+
+  @property
+  def full_job_conf(self):
+    return {'property': []}
+
+  def filter_tasks(self, task_types=None, task_states=None, task_text=None):
+    return []
+
+  @property
+  def job_attempts(self):
+    return {'jobAttempt': []}
+
+
 class Task:
 
   def __init__(self, job, attrs):
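KilledJob keeps the job page functional by filling in counters and task data that a killed application no longer reports, instead of calling the now-unavailable MapReduce REST endpoints. A rough usage sketch; the attrs dict would come from the ResourceManager's cluster/apps/&lt;app_id&gt; response, and the variable names are illustrative:

    # Sketch: what the job page can safely rely on for a killed application
    killed = KilledJob(resource_manager_api, app_attrs)

    killed.jobId_short     # job id without the 'application_' prefix
    killed.counters        # {}  -- no call to the MR history server
    killed.filter_tasks()  # []  -- no task list is available
    killed.job_attempts    # {'jobAttempt': []}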

+ 5 - 0
desktop/libs/hadoop/src/hadoop/yarn/mapreduce_api.py

@@ -22,6 +22,7 @@ import threading
 from desktop.lib.rest.http_client import HttpClient
 from desktop.lib.rest.resource import Resource
 from hadoop import cluster
+from hadoop.yarn.resource_manager_api import get_resource_manager
 
 
 LOG = logging.getLogger(__name__)
@@ -108,3 +109,7 @@ class MapreduceApi(object):
     app_id = job_id.replace('job', 'application')
     job_id = job_id.replace('application', 'job')
     return self._root.get('%(app_id)s/ws/%(version)s/mapreduce/jobs/%(job_id)s/tasks/%(task_id)s/attempts/%(attempt_id)s' % {'app_id': app_id, 'job_id': job_id, 'task_id': task_id, 'attempt_id': attempt_id, 'version': _API_VERSION}, headers={'Accept': _JSON_CONTENT_TYPE})
+
+  def kill(self, job_id):
+    app_id = job_id.replace('job', 'application')
+    get_resource_manager().kill(app_id) # We need to call the RM
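kill() on the MapReduce API is a thin hand-off: the kill operation lives on the ResourceManager REST API, so the job id is mapped back to its application id first. Roughly (the id is illustrative):

    job_id = 'job_1356251510842_0054'
    app_id = job_id.replace('job', 'application')   # -> 'application_1356251510842_0054'
    get_resource_manager().kill(app_id)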

+ 4 - 0
desktop/libs/hadoop/src/hadoop/yarn/resource_manager_api.py

@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import json
 import logging
 import posixpath
 import threading
@@ -82,3 +83,6 @@ class ResourceManagerApi(object):
 
   def app(self, app_id):
     return self._root.get('cluster/apps/%(app_id)s' % {'app_id': app_id}, headers={'Accept': _JSON_CONTENT_TYPE})
+
+  def kill(self, app_id):
+    return self._root.put('cluster/apps/%(app_id)s/state' % {'app_id': app_id}, data=json.dumps({'state': 'KILLED'}), contenttype=_JSON_CONTENT_TYPE)
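Under the hood this is the standard YARN ResourceManager REST call for changing an application's state (a PUT on /ws/v1/cluster/apps/&lt;app_id&gt;/state with a JSON body). A roughly equivalent call outside Hue's Resource wrapper, with an illustrative host and application id:

    import json
    import requests  # used here only to illustrate the HTTP call; Hue goes through its Resource class

    resp = requests.put(
        'http://resourcemanager:8088/ws/v1/cluster/apps/application_1356251510842_0054/state',
        data=json.dumps({'state': 'KILLED'}),
        headers={'Content-Type': 'application/json'},
    )
    print(resp.json())  # the RM reports the application's (target) state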