@@ -52,12 +52,15 @@ except:
   LOG.warn('Hive is not enabled')
   def hiveserver2_impersonation_enabled(): return True
 
-from jobbrowser.conf import SHARE_JOBS
+from jobbrowser.conf import LOG_OFFSET, SHARE_JOBS
 from jobbrowser.api import get_api, ApplicationNotRunning, JobExpired
 from jobbrowser.models import Job, JobLinkage, Tracker, Cluster, can_view_job, can_modify_job, LinkJobLogs, can_kill_job
 from jobbrowser.yarn_models import Application
 
 
+LOG_OFFSET_BYTES = LOG_OFFSET.get()
+
+
 def check_job_permission(view_func):
   """
   Ensure that the user has access to the job.
@@ -280,13 +283,13 @@ def job_attempt_logs(request, job, attempt_index=0):
   return render("job_attempt_logs.mako", request, {
     "attempt_index": attempt_index,
     "job": job,
+    "log_offset": LOG_OFFSET_BYTES
   })
 
 
 @check_job_permission
-def job_attempt_logs_json(request, job, attempt_index=0, name='syslog', offset=0):
+def job_attempt_logs_json(request, job, attempt_index=0, name='syslog', offset=LOG_OFFSET_BYTES):
   """For async log retrieval as Yarn servers are very slow"""
-
   log_link = None
   response = {'status': -1}
 
@@ -312,7 +315,7 @@ def job_attempt_logs_json(request, job, attempt_index=0, name='syslog', offset=0
   if log_link:
     link = '/%s/' % name
     params = {}
-    if offset and int(offset) >= 0:
+    if offset != 0:
       params['start'] = offset
 
     root = Resource(get_log_client(log_link), urlparse.urlsplit(log_link)[2], urlencode=False)
@@ -339,7 +342,7 @@ def job_attempt_logs_json(request, job, attempt_index=0, name='syslog', offset=0
 
 
 @check_job_permission
-def job_single_logs(request, job):
+def job_single_logs(request, job, offset=LOG_OFFSET_BYTES):
   """
   Try to smartly detect the most useful task attempt (e.g. Oozie launcher, failed task) and get its MR logs.
   """
@@ -366,7 +369,9 @@ def job_single_logs(request, job):
   if task is None or not task.taskAttemptIds:
     raise PopupException(_("No tasks found for job %(id)s.") % {'id': job.jobId})
 
-  return single_task_attempt_logs(request, **{'job': job.jobId, 'taskid': task.taskId, 'attemptid': task.taskAttemptIds[-1]})
+  params = {'job': job.jobId, 'taskid': task.taskId, 'attemptid': task.taskAttemptIds[-1], 'offset': offset}
+
+  return single_task_attempt_logs(request, **params)
 
 
 @check_job_permission
@@ -442,7 +447,7 @@ def single_task_attempt(request, job, taskid, attemptid):
   })
 
 @check_job_permission
-def single_task_attempt_logs(request, job, taskid, attemptid):
+def single_task_attempt_logs(request, job, taskid, attemptid, offset=LOG_OFFSET_BYTES):
   jt = get_api(request.user, request.jt)
 
   job_link = jt.get_job_link(job.jobId)
@@ -463,7 +468,7 @@ def single_task_attempt_logs(request, job, taskid, attemptid):
     diagnostic_log = ", ".join(task.diagnosticMap[attempt.attemptId])
     logs = [diagnostic_log]
     # Add remaining logs
-    logs += [section.strip() for section in attempt.get_task_log()]
+    logs += [section.strip() for section in attempt.get_task_log(offset=offset)]
     log_tab = [i for i, log in enumerate(logs) if log]
     if log_tab:
       first_log_tab = log_tab[0]
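
Note: the patch assumes `jobbrowser.conf.LOG_OFFSET` resolves to a byte offset, typically negative so that only the tail of each task log is fetched; that is also why the `int(offset) >= 0` guard becomes `offset != 0`. The conf entry itself is not part of this diff. Below is a minimal sketch of what such an entry could look like, assuming Hue's usual `desktop.lib.conf.Config` pattern; the key name and default value are illustrative, not taken from this change.

```python
# Hypothetical jobbrowser/conf.py entry (illustrative only).
from desktop.lib.conf import Config

LOG_OFFSET = Config(
  key='log_offset',
  default=-1000000,  # assumption: read roughly the last 1 MB of each log
  type=int,
  help='Offset in bytes when fetching task logs; a negative value reads from the end of the log.'
)
```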