
HUE-4202 [jb] Enable offset param for fetching jobbrowser logs

Jenny Kim, 9 years ago
commit 5aff350

+ 6 - 0
apps/jobbrowser/src/jobbrowser/conf.py

@@ -32,3 +32,9 @@ DISABLE_KILLING_JOBS = Config(
  type=coerce_bool,
  help=_('Disable the job kill button for all users in the job browser.'))
 
+LOG_OFFSET = Config(
+  key='log_offset',
+  default=-1000000,
+  type=int,
+  help=_('Offset in bytes where a negative offset will fetch the last N bytes for the given log file (default 1MB).')
+)

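A negative value here follows the usual tail convention: it means "the last N bytes of the file" rather than a position from the start. A rough sketch of that convention in plain Python (hypothetical file path; YARN actually serves these logs over HTTP, so this only illustrates the offset semantics, not Hue's code path):

    # Sketch of the negative-offset convention: offset >= 0 reads from that
    # byte position, offset < 0 reads the last |offset| bytes of the file.
    def read_from_offset(path, offset):
        with open(path, 'rb') as f:
            if offset < 0:
                f.seek(0, 2)                   # SEEK_END: jump to end of file
                size = f.tell()
                f.seek(max(0, size + offset))  # back up |offset| bytes, clamped at 0
            else:
                f.seek(offset)
            return f.read()

    # read_from_offset('/var/log/app.log', -1000000) returns the last ~1MB
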
+ 3 - 3
apps/jobbrowser/src/jobbrowser/templates/job_attempt_logs.mako

@@ -116,7 +116,7 @@ ${ comps.menubar() }
    initLogsElement($("#stderr-container"));
 
    function refreshSyslogs() {
-      $.getJSON("${ url("jobbrowser.views.job_attempt_logs_json", job=job.jobId, attempt_index=attempt_index, name='syslog', offset=0) }", function (data) {
+      $.getJSON("${ url("jobbrowser.views.job_attempt_logs_json", job=job.jobId, attempt_index=attempt_index, name='syslog', offset=log_offset) }", function (data) {
        if (data && data.log) {
          appendAndScroll($("#syslog-container"), data.log);
          window.setTimeout(refreshSyslogs, 5000);
@@ -125,7 +125,7 @@ ${ comps.menubar() }
    }
 
    function refreshStdout() {
-      $.getJSON("${ url("jobbrowser.views.job_attempt_logs_json", job=job.jobId, attempt_index=attempt_index, name='stdout', offset=0) }", function (data) {
+      $.getJSON("${ url("jobbrowser.views.job_attempt_logs_json", job=job.jobId, attempt_index=attempt_index, name='stdout', offset=log_offset) }", function (data) {
        if (data && data.log) {
          appendAndScroll($("#stdout-container"), data.log);
          window.setTimeout(refreshStdout, 5000);
@@ -134,7 +134,7 @@ ${ comps.menubar() }
    }
 
    function refreshStderr() {
-      $.getJSON("${ url("jobbrowser.views.job_attempt_logs_json", job=job.jobId, attempt_index=attempt_index, name='stderr', offset=0) }", function (data) {
+      $.getJSON("${ url("jobbrowser.views.job_attempt_logs_json", job=job.jobId, attempt_index=attempt_index, name='stderr', offset=log_offset) }", function (data) {
        if (data && data.log) {
          appendAndScroll($("#stderr-container"), data.log);
          window.setTimeout(refreshStderr, 5000);

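All three polling functions re-request their endpoint every 5 seconds and append whatever comes back, so passing log_offset through the url() call means each poll now asks for the tail of the log instead of starting at byte 0. The JSON contract the callbacks rely on is just a status/log pair; a minimal sketch from the server side (field names taken from the view and template code in this commit, the rest is illustrative):

    import json

    # Shape of the payload the $.getJSON callbacks expect: they bail out
    # unless data.log is present and truthy.
    def fake_log_response(log_text):
        return json.dumps({'status': 0, 'log': log_text})

    print(fake_log_response('last megabyte of syslog...'))
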
+ 1 - 1
apps/jobbrowser/src/jobbrowser/urls.py

@@ -36,7 +36,7 @@ urlpatterns = patterns('jobbrowser.views',
 
  # MR2 specific
  url(r'^jobs/(?P<job>\w+)/job_attempt_logs/(?P<attempt_index>\d+)$', 'job_attempt_logs', name='job_attempt_logs'),
-  url(r'^jobs/(?P<job>\w+)/job_attempt_logs_json/(?P<attempt_index>\d+)/(?P<name>\w+)?/(?P<offset>\d+)?$', 'job_attempt_logs_json', name='job_attempt_logs_json'),
+  url(r'^jobs/(?P<job>\w+)/job_attempt_logs_json/(?P<attempt_index>\d+)/(?P<name>\w+)?/(?P<offset>[\d-]+)?$', 'job_attempt_logs_json', name='job_attempt_logs_json'),
  url(r'^jobs/(?P<jobid>\w+)/job_not_assigned/(?P<path>.+)$','job_not_assigned', name='job_not_assigned'),
 
  # Unused

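The widened offset group is what lets the new negative default survive URL routing: \d+ rejects the minus sign outright, while [\d-]+ accepts it. A quick illustration with plain re:

    import re

    OLD = r'^(?P<offset>\d+)?$'
    NEW = r'^(?P<offset>[\d-]+)?$'

    print(re.match(OLD, '-1000000'))  # None: the old pattern cannot route a negative offset
    print(re.match(NEW, '-1000000'))  # <re.Match ...>: the minus sign is now allowed
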
+ 13 - 8
apps/jobbrowser/src/jobbrowser/views.py

@@ -52,12 +52,15 @@ except:
  LOG.warn('Hive is not enabled')
  def hiveserver2_impersonation_enabled(): return True
 
-from jobbrowser.conf import SHARE_JOBS
+from jobbrowser.conf import LOG_OFFSET, SHARE_JOBS
 from jobbrowser.api import get_api, ApplicationNotRunning, JobExpired
 from jobbrowser.models import Job, JobLinkage, Tracker, Cluster, can_view_job, can_modify_job, LinkJobLogs, can_kill_job
 from jobbrowser.yarn_models import Application
 
 
+LOG_OFFSET_BYTES = LOG_OFFSET.get()
+
+
 def check_job_permission(view_func):
   """
   Ensure that the user has access to the job.
@@ -280,13 +283,13 @@ def job_attempt_logs(request, job, attempt_index=0):
  return render("job_attempt_logs.mako", request, {
    "attempt_index": attempt_index,
    "job": job,
+    "log_offset": LOG_OFFSET_BYTES
  })
 
 
 @check_job_permission
-def job_attempt_logs_json(request, job, attempt_index=0, name='syslog', offset=0):
+def job_attempt_logs_json(request, job, attempt_index=0, name='syslog', offset=LOG_OFFSET_BYTES):
  """For async log retrieval as Yarn servers are very slow"""
-
  log_link = None
  response = {'status': -1}
 
@@ -312,7 +315,7 @@ def job_attempt_logs_json(request, job, attempt_index=0, name='syslog', offset=0
  if log_link:
    link = '/%s/' % name
    params = {}
-    if offset and int(offset) >= 0:
+    if offset != 0:
      params['start'] = offset
 
    root = Resource(get_log_client(log_link), urlparse.urlsplit(log_link)[2], urlencode=False)
@@ -339,7 +342,7 @@ def job_attempt_logs_json(request, job, attempt_index=0, name='syslog', offset=0
 
 
 @check_job_permission
-def job_single_logs(request, job):
+def job_single_logs(request, job, offset=LOG_OFFSET_BYTES):
  """
  Try to smartly detect the most useful task attempt (e.g. Oozie launcher, failed task) and get its MR logs.
  """
@@ -366,7 +369,9 @@ def job_single_logs(request, job):
  if task is None or not task.taskAttemptIds:
    raise PopupException(_("No tasks found for job %(id)s.") % {'id': job.jobId})
 
-  return single_task_attempt_logs(request, **{'job': job.jobId, 'taskid': task.taskId, 'attemptid': task.taskAttemptIds[-1]})
+  params = {'job': job.jobId, 'taskid': task.taskId, 'attemptid': task.taskAttemptIds[-1], 'offset': offset}
+
+  return single_task_attempt_logs(request, **params)
 
 
 @check_job_permission
@@ -442,7 +447,7 @@ def single_task_attempt(request, job, taskid, attemptid):
    })
 
 @check_job_permission
-def single_task_attempt_logs(request, job, taskid, attemptid):
+def single_task_attempt_logs(request, job, taskid, attemptid, offset=LOG_OFFSET_BYTES):
  jt = get_api(request.user, request.jt)
 
  job_link = jt.get_job_link(job.jobId)
@@ -463,7 +468,7 @@ def single_task_attempt_logs(request, job, taskid, attemptid):
      diagnostic_log =  ", ".join(task.diagnosticMap[attempt.attemptId])
    logs = [diagnostic_log]
    # Add remaining logs
-    logs += [section.strip() for section in attempt.get_task_log()]
+    logs += [section.strip() for section in attempt.get_task_log(offset=offset)]
    log_tab = [i for i, log in enumerate(logs) if log]
    if log_tab:
      first_log_tab = log_tab[0]

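Two details in this file are worth calling out. LOG_OFFSET.get() is read once at import time into LOG_OFFSET_BYTES, so the value is fixed for the life of the process. And the guard flips from int(offset) >= 0 to offset != 0: with a negative default, the old test would silently drop the start parameter and fetch the whole log from the beginning. A condensed sketch of what the guard now produces (illustrative values, not the full view):

    # offset == 0 means 'from the start of the file', so no explicit start
    # parameter is needed; any non-zero value, positive or negative, is
    # forwarded to the log endpoint.
    def log_params(offset):
        params = {}
        if offset != 0:
            params['start'] = offset
        return params

    print(log_params(0))         # {}                  -> whole log
    print(log_params(4096))      # {'start': 4096}     -> skip the first 4KB
    print(log_params(-1000000))  # {'start': -1000000} -> roughly the last 1MB
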
+ 1 - 1
apps/jobbrowser/src/jobbrowser/yarn_models.py

@@ -439,7 +439,7 @@ class Attempt:
    for name in ('stdout', 'stderr', 'syslog'):
      link = '/%s/' % name
      params = {}
-      if int(offset) >= 0:
+      if int(offset) != 0:
        params['start'] = offset
 
      response = None

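The same guard flip in Attempt.get_task_log() lets the negative offset reach the NodeManager's log page; YARN's log servlet accepts a start parameter and, to my understanding, treats negative values as offsets from the end of the file. A sketch of the URLs the loop ends up requesting (hypothetical host and container id):

    # Hypothetical NodeManager log link; the real one comes from the
    # attempt's assigned container.
    log_link = 'http://nm-host:8042/node/containerlogs/container_x_0001_01_000001/hue'
    offset = -1000000

    for name in ('stdout', 'stderr', 'syslog'):
        url = '%s/%s/' % (log_link, name)
        if int(offset) != 0:
            url += '?start=%d' % int(offset)  # tail request when offset is negative
        print(url)
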
+ 9 - 9
apps/pig/src/pig/api.py

@@ -44,8 +44,7 @@ class OozieApi(object):
   """
   """
   WORKFLOW_NAME = 'pig-app-hue-script'
   WORKFLOW_NAME = 'pig-app-hue-script'
   RE_LOG_END = re.compile('(<<< Invocation of Pig command completed <<<|<<< Invocation of Main class completed <<<)')
   RE_LOG_END = re.compile('(<<< Invocation of Pig command completed <<<|<<< Invocation of Main class completed <<<)')
-  RE_LOG_START_RUNNING = re.compile('>>> Invoking Pig command line now >>>(.+?)(<<< Invocation of Pig command completed <<<|<<< Invocation of Main class completed)', re.M | re.DOTALL)
-  RE_LOG_START_FINISHED = re.compile('(>>> Invoking Pig command line now >>>)', re.M | re.DOTALL)
+  RE_LOG_START_RUNNING = re.compile('(Pig script \[(?:[\w.-]+)\] content:.+)', re.M | re.DOTALL)
   MAX_DASHBOARD_JOBS = 100
   MAX_DASHBOARD_JOBS = 100
 
 
   def __init__(self, fs, jt, user):
   def __init__(self, fs, jt, user):
@@ -178,9 +177,13 @@ class OozieApi(object):
 
          if data and 'logs' in data:
            matched_logs = self._match_logs(data)
-            logs[action.name] = LinkJobLogs._make_links(matched_logs)
-            is_really_done = OozieApi.RE_LOG_END.search(data['logs'][1]) is not None
 
+            if matched_logs:
+              logs[action.name] = LinkJobLogs._make_links(matched_logs)
+
+            is_really_done = OozieApi.RE_LOG_END.search(data['logs'][1]) is not None
+            if is_really_done and not matched_logs:
+              LOG.warn('Unable to scrape full pig logs, try increasing the jobbrowser log_offset configuration value.')
      except Exception, e:
        LOG.error('An error occurred while watching the job running: %(error)s' % {'error': e})
        is_really_done = True
@@ -207,13 +210,10 @@ class OozieApi(object):
     """Difficult to match multi lines of text"""
     """Difficult to match multi lines of text"""
     logs = data['logs'][1]
     logs = data['logs'][1]
 
 
-    if OozieApi.RE_LOG_END.search(logs):
+    if OozieApi.RE_LOG_START_RUNNING.search(logs):
       return re.search(OozieApi.RE_LOG_START_RUNNING, logs).group(1).strip()
       return re.search(OozieApi.RE_LOG_START_RUNNING, logs).group(1).strip()
     else:
     else:
-      group = re.search(OozieApi.RE_LOG_START_FINISHED, logs)
-      i = logs.index(group.group(1)) + len(group.group(1))
-      return logs[i:].strip()
-
+      return None
 
 
   def massaged_jobs_for_json(self, request, oozie_jobs, hue_jobs):
   def massaged_jobs_for_json(self, request, oozie_jobs, hue_jobs):
     jobs = []
     jobs = []

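The rewritten matcher keys on the "Pig script [...] content:" banner instead of the old ">>> Invoking Pig command line now >>>" sentinels, and _match_logs() now returns None when the banner is missing (for instance because a too-small offset truncated it away), which is what triggers the new LOG.warn above. A quick check of the new pattern against a made-up launcher log:

    import re

    RE_LOG_START_RUNNING = re.compile(r'(Pig script \[(?:[\w.-]+)\] content:.+)', re.M | re.DOTALL)

    sample = 'launcher preamble...\nPig script [my-script.pig] content:\nA = LOAD ...;'
    m = RE_LOG_START_RUNNING.search(sample)
    print(m.group(1) if m else None)  # everything from the banner onward, or None
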
+ 1 - 2
apps/pig/src/pig/views.py

@@ -21,7 +21,6 @@ import logging
 from django.core.urlresolvers import reverse
 from django.utils.translation import ugettext as _
 from django.views.decorators.csrf import ensure_csrf_cookie
-from django.views.decorators.http import require_http_methods
 
 from desktop.lib.django_util import JsonResponse, render
 from desktop.lib.exceptions_renderable import PopupException
@@ -215,7 +214,7 @@ def delete(request):
 @show_oozie_error
 def watch(request, job_id):
  oozie_workflow = check_job_access_permission(request, job_id)
-  logs, workflow_actions, is_really_done = api.get(request.jt, request.jt, request.user).get_log(request, oozie_workflow)
+  logs, workflow_actions, is_really_done = api.get(request.fs, request.jt, request.user).get_log(request, oozie_workflow)
  output = get_workflow_output(oozie_workflow, request.fs)
 
  workflow = {

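The one-argument fix in watch() lines the call up with the constructor shown earlier in apps/pig/src/pig/api.py (def __init__(self, fs, jt, user), assuming api.get() forwards its arguments there): the first positional argument is the filesystem handle, so passing request.jt twice bound the JobTracker where the filesystem belonged. Schematically, with toy stand-ins rather than real Hue objects:

    class OozieApi(object):
        # Same positional order as the constructor in the api.py hunk above.
        def __init__(self, fs, jt, user):
            self.fs, self.jt, self.user = fs, jt, user

    fs, jt, user = 'hdfs-client', 'jobtracker-client', 'hue-user'
    broken = OozieApi(jt, jt, user)  # old call: the JobTracker lands in self.fs
    fixed = OozieApi(fs, jt, user)   # new call: arguments in declared order
    print(broken.fs, '->', fixed.fs)
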
+ 2 - 0
desktop/conf.dist/hue.ini

@@ -1102,6 +1102,8 @@
  # Whether to disalbe the job kill button for all users in the jobbrowser
  ## disable_killing_jobs=false
 
+  # Offset in bytes where a negative offset will fetch the last N bytes for the given log file (default 1MB).
+  ## log_offset=-1000000
 
 ###########################################################################
 # Settings to configure Sentry / Security App.

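Both ini additions ship commented out, so LOG_OFFSET.get() falls back to the default=-1000000 declared in conf.py; uncommenting log_offset under the [jobbrowser] section overrides it. The observable behavior, mimicked with a toy dict in place of Hue's real desktop.lib.conf machinery:

    # Toy stand-in for the ini lookup: an empty dict plays the role of a
    # section where every line is still commented out.
    ini = {}

    def get_log_offset():
        return int(ini.get('log_offset', -1000000))  # conf.py default

    print(get_log_offset())        # -1000000 (shipped default, ~1MB tail)
    ini['log_offset'] = '-500000'  # as if the '## log_offset' line were uncommented
    print(get_log_offset())        # -500000
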
+ 4 - 0
desktop/conf/pseudo-distributed.ini.tmpl

@@ -960,6 +960,8 @@
  # Use Cron format for defining the frequency of a Coordinator instead of the old frequency number/unit.
  ## enable_cron_scheduling=true
 
+  # Offset in bytes where a negative offset will fetch the last N bytes for the given log file (default 1MB).
+  ## log_offset=-1000000
 
 ###########################################################################
 # Settings to configure the Filebrowser app
@@ -1106,6 +1108,8 @@
  # Whether to disalbe the job kill button for all users in the jobbrowser
  ## disable_killing_jobs=false
 
+  # Offset in bytes where a negative offset will fetch the last N bytes for the given log file (default 1MB).
+  ## log_offset=-1000000
 
 ###########################################################################
 # Settings to configure Sentry / Security App.