#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import time

try:
  import json
except ImportError:
  import simplejson as json

import re

from datetime import datetime, timedelta
from string import Template
from itertools import chain

from django.db import models
from django.core.urlresolvers import reverse
from django.core.validators import RegexValidator
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _, ugettext_lazy as _t

from desktop.log.access import access_warn
from desktop.lib import django_mako
from desktop.lib.exceptions_renderable import PopupException
from hadoop.fs.exceptions import WebHdfsException
from hadoop.fs.hadoopfs import Hdfs
from liboozie.submittion import Submission

from oozie.management.commands import oozie_setup
from oozie.conf import REMOTE_SAMPLE_DIR, SHARE_JOBS
from timezones import TIMEZONES


LOG = logging.getLogger(__name__)

PATH_MAX = 512
name_validator = RegexValidator(regex='[a-zA-Z_][\-_a-zA-Z0-9]{1,39}',
                                message=_('Enter a valid value: a combination of 2 to 40 letters, digits, underscores or dashes, starting with a letter or underscore'))


"""
Permissions:

A Workflow/Coordinator can be accessed/submitted by its owner, a superuser, or by anyone if its 'is_shared'
property and SHARE_JOBS are both set to True.

A Workflow/Coordinator can be modified only by its owner or a superuser.

Permission checking happens by adding the decorators.
"""


class JobManager(models.Manager):
  def is_accessible_or_exception(self, request, job_id, exception_class=PopupException):
    if job_id is None:
      return
    try:
      job = Job.objects.select_related().get(pk=job_id).get_full_node()
      if job.is_accessible(request.user):
        return job
      else:
        message = _("Permission denied. %(username)s does not have the permissions to access job %(id)s") % \
            {'username': request.user.username, 'id': job.id}
        access_warn(request, message)
        request.error(message)
        raise exception_class(message)
    except Job.DoesNotExist:
      raise exception_class(_('Job %(id)s does not exist') % {'id': job_id})

  def can_edit_or_exception(self, request, job, exception_class=PopupException):
    if job.is_editable(request.user):
      return True
    else:
      raise exception_class(_('Not allowed to modify this job'))


class Job(models.Model):
  """
  Base class for Workflows and Coordinators.

  http://incubator.apache.org/oozie/docs/3.2.0-incubating/docs/index.html
  """
  owner = models.ForeignKey(User, db_index=True, verbose_name=_t('Owner'), help_text=_t('Person who can modify the job.'))
  name = models.CharField(max_length=40, blank=False, validators=[name_validator],
                          help_text=_t('Name of the job, which must be unique per user.'), verbose_name=_t('Name'))
  description = models.CharField(max_length=1024, blank=True, verbose_name=_t('Description'),
                                 help_text=_t('The purpose of the job.'))
  last_modified = models.DateTimeField(auto_now=True, db_index=True, verbose_name=_t('Last modified'))
  schema_version = models.CharField(max_length=128, verbose_name=_t('Schema version'),
                                    help_text=_t('The version of the XML schema used to talk to Oozie.'))
  deployment_dir = models.CharField(max_length=1024, blank=True, verbose_name=_t('HDFS deployment directory'),
                                    help_text=_t('The path on the HDFS where all the workflows and '
                                                 'dependencies must be uploaded.'))
  is_shared = models.BooleanField(default=False, db_index=True, verbose_name=_t('Is shared'),
                                  help_text=_t('Enable other users to have access to this job.'))
  parameters = models.TextField(default='[{"name":"oozie.use.system.libpath","value":"true"}]', verbose_name=_t('Oozie parameters'),
                                help_text=_t('Parameters used at submission time (e.g. market=US, oozie.use.system.libpath=true).'))

  objects = JobManager()
  unique_together = ('owner', 'name')

  def save(self):
    super(Job, self).save()

    if not self.deployment_dir:
      default_dir = Hdfs.join(REMOTE_SAMPLE_DIR.get(), '_%s_-oozie-%s-%s' % (self.owner.username, self.id, time.time()))
      self.deployment_dir = default_dir
      super(Job, self).save()

  def is_deployed(self, fs):
    return self.deployment_dir != '' and fs.exists(self.deployment_dir)

  def __str__(self):
    return '%s - %s' % (self.name, self.owner)

  def get_full_node(self):
    try:
      return self.workflow
    except Workflow.DoesNotExist:
      pass
    try:
      return self.coordinator
    except Coordinator.DoesNotExist:
      pass

  def get_type(self):
    return self.get_full_node().get_type()

  def get_absolute_url(self):
    return self.get_full_node().get_absolute_url()

  def get_parameters(self):
    return json.loads(self.parameters)

  @property
  def status(self):
    if self.is_shared:
      return _('shared')
    else:
      return _('personal')

  def find_all_parameters(self):
    params = self.find_parameters()

    for param in self.get_parameters():
      params[param['name'].strip()] = param['value']

    return [{'name': name, 'value': value} for name, value in params.iteritems()]

  def is_accessible(self, user):
    return user.is_superuser or self.owner == user or (SHARE_JOBS.get() and self.is_shared)

  def is_editable(self, user):
    """Only owners or admins can modify a job."""
    return user.is_superuser or self.owner == user
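
# Hedged usage sketch (hypothetical view code) of the JobManager guards above:
#
#   def edit_workflow_view(request, job_id):
#     job = Job.objects.is_accessible_or_exception(request, job_id)
#     Job.objects.can_edit_or_exception(request, job)
#     ...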

class WorkflowManager(models.Manager):
  def new_workflow(self, owner):
    workflow = Workflow(owner=owner, schema_version='uri:oozie:workflow:0.4')

    kill = Kill(name='kill', workflow=workflow, node_type=Kill.node_type)
    end = End(name='end', workflow=workflow, node_type=End.node_type)
    start = Start(name='start', workflow=workflow, node_type=Start.node_type)

    to = Link(parent=start, child=end, name='to')
    related = Link(parent=start, child=end, name='related')

    workflow.start = start
    workflow.end = end

    return workflow

  def initialize(self, workflow, fs):
    Kill.objects.create(name='kill', workflow=workflow, node_type=Kill.node_type)
    end = End.objects.create(name='end', workflow=workflow, node_type=End.node_type)
    start = Start.objects.create(name='start', workflow=workflow, node_type=Start.node_type)

    link = Link(parent=start, child=end, name='to')
    link.save()
    Link.objects.create(parent=start, child=end, name='related')

    workflow.start = start
    workflow.end = end
    workflow.save()

    self.check_workspace(workflow, fs)

  def check_workspace(self, workflow, fs):
    oozie_setup.create_directories(fs)

    if workflow.is_shared:
      perms = 0755
    else:
      perms = 0711

    Submission(workflow.owner, workflow, fs, {})._create_dir(workflow.deployment_dir, perms=perms)

  def destroy(self, workflow, fs):
    Submission(workflow.owner, workflow, fs, {}).remove_deployment_dir()
    workflow.coordinator_set.update(workflow=None) # In Django 1.3 could do ON DELETE SET NULL
    workflow.save()
    workflow.delete()


class Workflow(Job):
  """
  http://incubator.apache.org/oozie/docs/3.2.0-incubating/docs/WorkflowFunctionalSpec.html
  """
  is_single = models.BooleanField(default=False)
  start = models.ForeignKey('Start', related_name='start_workflow', blank=True, null=True)
  end = models.ForeignKey('End', related_name='end_workflow', blank=True, null=True)
  job_xml = models.CharField(max_length=PATH_MAX, default='', blank=True, verbose_name=_t('Job XML'),
                             help_text=_t('Refer to a Hadoop JobConf job.xml file bundled in the workflow deployment directory. '
                                          'Properties specified in the Job Properties element override properties specified in the '
                                          'files specified in the Job XML element.'))
  job_properties = models.TextField(default='[]', verbose_name=_t('Hadoop job properties'),
                                    help_text=_t('Job configuration properties used by all the actions of the workflow '
                                                 '(e.g. mapred.job.queue.name=production).'))

  objects = WorkflowManager()

  HUE_ID = 'hue-id-w'

  def get_type(self):
    return 'workflow'

  def get_properties(self):
    return json.loads(self.job_properties)

  def clone(self, fs, new_owner=None):
    source_deployment_dir = self.deployment_dir # Needed: deployment_dir is reset below
    nodes = self.node_set.all()
    links = Link.objects.filter(parent__workflow=self)

    copy = self
    copy.pk = None
    copy.id = None
    copy.name += '-copy'
    copy.deployment_dir = ''
    if new_owner is not None:
      copy.owner = new_owner
    copy.save()

    old_nodes_mapping = {}

    for node in nodes:
      prev_id = node.id
      node = node.get_full_node()
      node.pk = None
      node.id = None
      node.workflow = copy
      node.save()
      old_nodes_mapping[prev_id] = node

    for link in links:
      link.pk = None
      link.id = None
      link.parent = old_nodes_mapping[link.parent.id]
      link.child = old_nodes_mapping[link.child.id]
      link.save()

    copy.start = old_nodes_mapping[self.start.id]
    copy.end = old_nodes_mapping[self.end.id]
    copy.save()

    try:
      if copy.is_shared:
        perms = 0755
      else:
        perms = 0711
      fs.copy_remote_dir(source_deployment_dir, copy.deployment_dir, owner=copy.owner, dir_mode=perms)
    except WebHdfsException, e:
      msg = _('The copy of the deployment directory failed: %s.') % e
      LOG.error(msg)
      raise PopupException(msg)

    # Reload workflow from the DB... clears the relationship cache
    copy = Workflow.objects.get(id=copy.id)
    return copy

  def has_cycle(self):
    """
    Topological sort (Kahn's algorithm) for detecting cycles in the directed graph.
    """
    queue = set([self.start])
    removed_edges = set()

    while queue:
      node = queue.pop()
      edges = set(node.get_children_links())
      for edge in edges:
        removed_edges.add(edge)
        # Edge's child has no other incoming edges
        if not set(edge.child.get_parent_links()) - removed_edges:
          queue.add(edge.child)

    graph_edges = set([edge for node in self.node_set.all() for edge in node.get_children_links()])

    return len(graph_edges - removed_edges) > 0 # A cycle is present iff some edges were never removed
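
  # Illustration on a hypothetical graph: with links start -> a, a -> b and
  # b -> a, node 'a' always keeps the unprocessed incoming edge 'b -> a', so it
  # is never enqueued; both cycle edges remain in graph_edges - removed_edges
  # and has_cycle() returns True.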

  def find_parameters(self):
    params = set()

    for node in self.node_list:
      if hasattr(node, 'find_parameters'):
        params.update(node.find_parameters())

    return dict([(param, '') for param in list(params)])

  @property
  def actions(self):
    return Action.objects.filter(workflow=self, node_type__in=Action.types)

  @property
  def node_list(self):
    """Return a flattened node list ordered by the hierarchy of the nodes in the workflow."""
    def flatten(nodes):
      flat = []
      if type(nodes) == list:
        for node in nodes:
          flat.extend(flatten(node))
      else:
        flat.append(nodes)
      return flat

    def from_iterable(iterables):
      # Backport of Python 2.6 chain.from_iterable(['ABC', 'DEF']) --> A B C D E F
      for it in iterables:
        for element in it:
          yield element

    return list(chain(from_iterable([flatten(row) for row in self.get_hierarchy()])))

  @classmethod
  def get_application_path_key(cls):
    return 'oozie.wf.application.path'

  @classmethod
  def get_application_filename(cls):
    return 'workflow.xml'

  def get_absolute_url(self):
    return reverse('oozie:edit_workflow', kwargs={'workflow': self.id})

  def get_hierarchy(self):
    node = self.start
    return self.get_hierarchy_rec(node=node) + [[Kill.objects.get(name='kill', workflow=node.workflow)],
                                                [End.objects.get(name='end', workflow=node.workflow)]]

  def get_hierarchy_rec(self, node=None, skip_parents_check=False):
    if node is None:
      node = self.start
      if node.id is None:
        return []

    node = node.get_full_node()
    parents = node.get_parents()

    if len(parents) > 1 and not skip_parents_check:
      return []

    if isinstance(node, End):
      return [] # Not returning the end node
    elif isinstance(node, Decision):
      children = node.get_children('start')
      end = node.get_child_end_or_none()
      if end:
        return [[node, [self.get_hierarchy_rec(node=child) for child in children]] + self.get_hierarchy_rec(node=end, skip_parents_check=True)]
      else:
        return [[node, [self.get_hierarchy_rec(node=child) for child in children]]]
    elif isinstance(node, Fork):
      children = node.get_children('start')
      return [[node] + [[self.get_hierarchy_rec(node=child) for child in children],
                        node.get_child_join()]] + self.get_hierarchy_rec(node.get_child_join().get_child('to'))
    elif isinstance(node, Join):
      return []
    else:
      child = Link.objects.filter(parent=node).exclude(name__in=['related', 'kill', 'error'])[0].child
      return [node] + self.get_hierarchy_rec(child)

  def gen_status_graph(self, forms, actions):
    template = 'editor/gen/workflow-graph-status.xml.mako'
    index = dict([(form.instance.id, form) for form in forms])
    actions_index = dict([(action.name, action) for action in actions])
    return django_mako.render_to_string(template, {'nodes': self.get_hierarchy(), 'index': index, 'actions': actions_index})

  def to_xml(self):
    tmpl = 'editor/gen/workflow.xml.mako'
    return re.sub(re.compile('\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {'workflow': self}))
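
  # For reference, the substitution above collapses the blank lines left behind
  # by the template, e.g. re.sub(r'\s*\n+', '\n', 'a \n\n  \nb') == 'a\nb'.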


class Link(models.Model):
  # Links to exclude when using get_children_links() / get_parent_links() in the API
  META_LINKS = ('related',)

  parent = models.ForeignKey('Node', related_name='child_node')
  child = models.ForeignKey('Node', related_name='parent_node', verbose_name='')

  name = models.CharField(max_length=40)
  comment = models.CharField(max_length=1024, default='', blank=True)

  def __unicode__(self):
    return '%s %s %s' % (self.parent, self.child, self.name)


class Node(models.Model):
  """
  Base class for the Oozie WorkflowAction or ControlFlow Nodes.

  http://nightly.cloudera.com/cdh4/cdh/4/oozie-3.1.3-cdh4.0.0-SNAPSHOT/WorkflowFunctionalSpec.html#a3_Workflow_Nodes

  The Node model is an abstract base class. All concrete actions derive from it,
  and it provides something for the Action or ControlFlow to reference.

  See https://docs.djangoproject.com/en/dev/topics/db/models/#multi-table-inheritance
  """
  PARAM_FIELDS = ()

  name = models.CharField(max_length=40, validators=[name_validator], verbose_name=_t('Name'),
                          help_text=_t('Name of the action, which must be unique per workflow.'))
  description = models.CharField(max_length=1024, blank=True, default='', verbose_name=_t('Description'),
                                 help_text=_t('The purpose of the action.'))
  node_type = models.CharField(max_length=64, blank=False, verbose_name=_t('Type'),
                               help_text=_t('The type of action (e.g. MapReduce, Pig...)'))
  workflow = models.ForeignKey(Workflow)
  children = models.ManyToManyField('self', related_name='parents', symmetrical=False, through=Link)

  unique_together = ('workflow', 'name')

  def get_full_node(self):
    if self.node_type == Mapreduce.node_type:
      node = self.mapreduce
    elif self.node_type == Pig.node_type:
      node = self.pig
    elif self.node_type == Hive.node_type:
      node = self.hive
    elif self.node_type == Sqoop.node_type:
      node = self.sqoop
    elif self.node_type == Ssh.node_type:
      node = self.ssh
    elif self.node_type == Shell.node_type:
      node = self.shell
    elif self.node_type == DistCp.node_type:
      node = self.distcp
    elif self.node_type == Fs.node_type:
      node = self.fs
    elif self.node_type == Email.node_type:
      node = self.email
    elif self.node_type == Streaming.node_type:
      node = self.streaming
    elif self.node_type == Java.node_type:
      node = self.java
    elif self.node_type == Start.node_type:
      node = self.start
    elif self.node_type == End.node_type:
      node = self.end
    elif self.node_type == Kill.node_type:
      node = self.kill
    elif self.node_type == Fork.node_type:
      node = self.fork
    elif self.node_type == Decision.node_type:
      node = self.decision
    elif self.node_type == Join.node_type:
      node = self.join
    else:
      raise Exception(_('Unknown Node type: %s. Was it set at its creation?') % (self.node_type,))

    return node

  def find_parameters(self):
    return find_parameters(self, self.PARAM_FIELDS)

  def __unicode__(self):
    if self.name != '':
      return '%s' % self.name
    else:
      return '%s-%s' % (self.node_type, self.id)

  def to_xml(self):
    node = self.get_full_node()
    data = {
      'node': node,
    }
    return django_mako.render_to_string(node.get_template_name(), data)

  # Can't use the through relation directly with this Django version:
  # https://docs.djangoproject.com/en/1.2/topics/db/models/#intermediary-manytomany
  def get_link(self, name=None):
    if name is None:
      return Link.objects.exclude(name__in=Link.META_LINKS).get(parent=self)
    else:
      return Link.objects.exclude(name__in=Link.META_LINKS).get(parent=self, name=name)

  def get_child_link(self, name=None):
    return self.get_link(name)

  def get_child(self, name=None):
    return self.get_link(name).child.get_full_node()

  def get_children(self, name=None):
    if name is not None:
      return [link.child for link in Link.objects.exclude(name__in=Link.META_LINKS).filter(parent=self, name=name)]
    else:
      return [link.child for link in Link.objects.exclude(name__in=Link.META_LINKS).filter(parent=self)]

  def get_parent(self, name=None):
    if name is not None:
      return self.get_parent_link(name).parent.get_full_node()
    else:
      return self.get_parent_link().parent.get_full_node()

  def get_parents(self):
    return [link.parent for link in self.get_parent_links()]

  def get_parent_link(self, name=None):
    if name is not None:
      return Link.objects.get(child=self, name=name)
    else:
      return Link.objects.get(child=self)

  def get_parent_links(self):
    return Link.objects.filter(child=self).exclude(name__in=Link.META_LINKS)

  def get_children_links(self, name=None):
    if name is None:
      return Link.objects.exclude(name__in=Link.META_LINKS).filter(parent=self)
    else:
      return Link.objects.exclude(name__in=Link.META_LINKS).filter(parent=self, name=name)

  def get_all_children_links(self):
    return Link.objects.filter(parent=self)

  def get_template_name(self):
    return 'editor/gen/workflow-%s.xml.mako' % self.node_type

  def is_visible(self):
    return True


class Action(Node):
  """
  http://incubator.apache.org/oozie/docs/3.2.0-incubating/docs/WorkflowFunctionalSpec.html#a3.2_Workflow_Action_Nodes
  """
  types = ()

  class Meta:
    # Cloning does not work anymore if not abstract
    abstract = True


# The fields with a '[]' default value are JSON-encoded lists (of name/value dictionaries).
# When adding a new action, also update:
#   - Action.types below
#   - Node.get_full_node()
#   - forms.py _node_type_TO_FORM_CLS
class Mapreduce(Action):
  PARAM_FIELDS = ('files', 'archives', 'job_properties', 'jar_path', 'prepares')
  node_type = 'mapreduce'

  files = models.TextField(default="[]", verbose_name=_t('Files'),
                           help_text=_t('List of names or paths of files to be added to the distributed cache and the task running directory.'))
  archives = models.TextField(default="[]", verbose_name=_t('Archives'),
                              help_text=_t('List of names or paths of the archives to be added to the distributed cache.'))
  job_properties = models.TextField(default='[]', verbose_name=_t('Hadoop job properties'),
                                    help_text=_t('For the job configuration (e.g. mapred.job.queue.name=production).'))
  jar_path = models.CharField(max_length=PATH_MAX, verbose_name=_t('Jar name'),
                              help_text=_t('Name or path to the %(program)s jar file on HDFS. E.g. examples.jar.') % {'program': 'MapReduce'})
  prepares = models.TextField(default="[]", verbose_name=_t('Prepares'),
                              help_text=_t('List of absolute paths to delete and then to create before starting the application. '
                                           'This should be used exclusively for directory cleanup.'))
  job_xml = models.CharField(max_length=PATH_MAX, default='', blank=True, verbose_name=_t('Job XML'),
                             help_text=_t('Refer to a Hadoop JobConf job.xml file bundled in the workflow deployment directory. '
                                          'Properties specified in the Job Properties element override properties specified in the '
                                          'files specified in the Job XML element.'))

  def get_properties(self):
    return json.loads(self.job_properties)

  def get_files(self):
    return json.loads(self.files)

  def get_archives(self):
    return json.loads(self.archives)

  def get_prepares(self):
    return json.loads(self.prepares)


class Streaming(Action):
  PARAM_FIELDS = ('files', 'archives', 'job_properties', 'mapper', 'reducer')
  node_type = "streaming"

  files = models.TextField(default="[]", verbose_name=_t('Files'),
                           help_text=_t('List of names or paths of files to be added to the distributed cache and the task running directory.'))
  archives = models.TextField(default="[]", verbose_name=_t('Archives'),
                              help_text=_t('List of names or paths of the archives to be added to the distributed cache.'))
  job_properties = models.TextField(default='[]', verbose_name=_t('Hadoop job properties'),
                                    help_text=_t('For the job configuration (e.g. mapred.job.queue.name=production).'))
  mapper = models.CharField(max_length=PATH_MAX, blank=False, verbose_name=_t('Mapper'),
                            help_text=_t('The executable/script to be used as mapper.'))
  reducer = models.CharField(max_length=PATH_MAX, blank=False, verbose_name=_t('Reducer'),
                             help_text=_t('The executable/script to be used as reducer.'))

  def get_properties(self):
    return json.loads(self.job_properties)

  def get_files(self):
    return json.loads(self.files)

  def get_archives(self):
    return json.loads(self.archives)


class Java(Action):
  PARAM_FIELDS = ('files', 'archives', 'jar_path', 'main_class', 'args',
                  'java_opts', 'job_properties', 'prepares')
  node_type = "java"

  files = models.TextField(default="[]", verbose_name=_t('Files'),
                           help_text=_t('List of names or paths of files to be added to the distributed cache and the task running directory.'))
  archives = models.TextField(default="[]", verbose_name=_t('Archives'),
                              help_text=_t('List of names or paths of the archives to be added to the distributed cache.'))
  jar_path = models.CharField(max_length=PATH_MAX, blank=False, verbose_name=_t('Jar name'),
                              help_text=_t('Name or path to the %(program)s jar file on HDFS. E.g. examples.jar.') % {'program': 'Java'})
  main_class = models.CharField(max_length=256, blank=False, verbose_name=_t('Main class'),
                                help_text=_t('Full name of the Java class. E.g. org.apache.hadoop.examples.Grep.'))
  args = models.CharField(max_length=4096, blank=True, verbose_name=_t('Arguments'),
                          help_text=_t('Arguments of the main method. The value of each arg element is considered a single argument '
                                       'and they are passed to the main method in the same order.'))
  java_opts = models.CharField(max_length=256, blank=True, verbose_name=_t('Java options'),
                               help_text=_t('Command-line parameters used to start the JVM that will execute '
                                            'the Java application. Using this element is equivalent to using the mapred.child.java.opts '
                                            'configuration property.'))
  job_properties = models.TextField(default='[]', verbose_name=_t('Hadoop job properties'),
                                    help_text=_t('For the job configuration (e.g. mapred.job.queue.name=production).'))
  prepares = models.TextField(default="[]", verbose_name=_t('Prepares'),
                              help_text=_t('List of absolute paths to delete and then to create before starting the application. '
                                           'This should be used exclusively for directory cleanup.'))
  job_xml = models.CharField(max_length=PATH_MAX, default='', blank=True, verbose_name=_t('Job XML'),
                             help_text=_t('Refer to a Hadoop JobConf job.xml file bundled in the workflow deployment directory. '
                                          'Properties specified in the Job Properties element override properties specified in the '
                                          'files specified in the Job XML element.'))

  def get_properties(self):
    return json.loads(self.job_properties)

  def get_files(self):
    return json.loads(self.files)

  def get_archives(self):
    return json.loads(self.archives)

  def get_prepares(self):
    return json.loads(self.prepares)


class Pig(Action):
  PARAM_FIELDS = ('files', 'archives', 'job_properties', 'params', 'prepares')
  node_type = 'pig'

  script_path = models.CharField(max_length=256, blank=False, verbose_name=_t('Script name'),
                                 help_text=_t('Script name or path to the Pig script. E.g. my_script.pig.'))
  params = models.TextField(default="[]", verbose_name=_t('Parameters'),
                            help_text=_t('The Pig parameters of the script. E.g. "-param", "INPUT=${inputDir}".'))
  files = models.TextField(default="[]", verbose_name=_t('Files'),
                           help_text=_t('List of names or paths of files to be added to the distributed cache and the task running directory.'))
  archives = models.TextField(default="[]", verbose_name=_t('Archives'),
                              help_text=_t('List of names or paths of the archives to be added to the distributed cache.'))
  job_properties = models.TextField(default='[]', verbose_name=_t('Hadoop job properties'),
                                    help_text=_t('For the job configuration (e.g. mapred.job.queue.name=production).'))
  prepares = models.TextField(default="[]", verbose_name=_t('Prepares'),
                              help_text=_t('List of absolute paths to delete and then to create before starting the application. '
                                           'This should be used exclusively for directory cleanup.'))
  job_xml = models.CharField(max_length=PATH_MAX, default='', blank=True, verbose_name=_t('Job XML'),
                             help_text=_t('Refer to a Hadoop JobConf job.xml file bundled in the workflow deployment directory. '
                                          'Properties specified in the Job Properties element override properties specified in the '
                                          'files specified in the Job XML element.'))

  def get_properties(self):
    return json.loads(self.job_properties)

  def get_files(self):
    return json.loads(self.files)

  def get_archives(self):
    return json.loads(self.archives)

  def get_params(self):
    return json.loads(self.params)

  def get_prepares(self):
    return json.loads(self.prepares)


class Hive(Action):
  PARAM_FIELDS = ('files', 'archives', 'job_properties', 'params', 'prepares')
  node_type = 'hive'

  script_path = models.CharField(max_length=256, blank=False, verbose_name=_t('Script name'),
                                 help_text=_t('Script name or path to the %(type)s script. E.g. my_script.sql.') % {'type': node_type.title()})
  params = models.TextField(default="[]", verbose_name=_t('Parameters'),
                            help_text=_t('The %(type)s parameters of the script. E.g. "-param", "INPUT=${inputDir}".') % {'type': node_type.title()})
  files = models.TextField(default="[]", verbose_name=_t('Files'),
                           help_text=_t('List of names or paths of files to be added to the distributed cache and the task running directory.'))
  archives = models.TextField(default="[]", verbose_name=_t('Archives'),
                              help_text=_t('List of names or paths of the archives to be added to the distributed cache.'))
  job_properties = models.TextField(default='[{"name":"oozie.hive.defaults","value":"hive-site.xml"}]',
                                    verbose_name=_t('Hadoop job properties'),
                                    help_text=_t('For the job configuration (e.g. mapred.job.queue.name=production).'))
  prepares = models.TextField(default="[]", verbose_name=_t('Prepares'),
                              help_text=_t('List of absolute paths to delete and then to create before starting the application. '
                                           'This should be used exclusively for directory cleanup.'))
  job_xml = models.CharField(max_length=PATH_MAX, default='', blank=True, verbose_name=_t('Job XML'),
                             help_text=_t('Refer to a Hive hive-site.xml file bundled in the workflow deployment directory.'))

  def get_properties(self):
    return json.loads(self.job_properties)

  def get_files(self):
    return json.loads(self.files)

  def get_archives(self):
    return json.loads(self.archives)

  def get_params(self):
    return json.loads(self.params)

  def get_prepares(self):
    return json.loads(self.prepares)


class Sqoop(Action):
  PARAM_FIELDS = ('files', 'archives', 'job_properties', 'params', 'prepares')
  node_type = 'sqoop'

  script_path = models.TextField(blank=True, verbose_name=_t('Command'), default='',
                                 help_text=_t('The full %(type)s command. Either put it here or split it by spaces and insert the parts as multiple parameters below.')
                                 % {'type': node_type.title()})
  params = models.TextField(default="[]", verbose_name=_t('Parameters'),
                            help_text=_t('If no command is specified, split the command by spaces and insert the %(type)s parameters '
                                         'here, e.g. import, --connect, jdbc:hsqldb:file:db.hsqldb, ...') % {'type': node_type.title()})
  files = models.TextField(default="[]", verbose_name=_t('Files'),
                           help_text=_t('List of names or paths of files to be added to the distributed cache and the task running directory.'))
  archives = models.TextField(default="[]", verbose_name=_t('Archives'),
                              help_text=_t('List of names or paths of the archives to be added to the distributed cache.'))
  job_properties = models.TextField(default='[]',
                                    verbose_name=_t('Hadoop job properties'),
                                    help_text=_t('For the job configuration (e.g. mapred.job.queue.name=production).'))
  prepares = models.TextField(default="[]", verbose_name=_t('Prepares'),
                              help_text=_t('List of absolute paths to delete and then to create before starting the application. '
                                           'This should be used exclusively for directory cleanup.'))
  job_xml = models.CharField(max_length=PATH_MAX, default='', blank=True, verbose_name=_t('Job XML'),
                             help_text=_t('Refer to a Hadoop JobConf job.xml file bundled in the workflow deployment directory. '
                                          'Properties specified in the Job Properties element override properties specified in the '
                                          'files specified in the Job XML element.'))

  def get_properties(self):
    return json.loads(self.job_properties)

  def get_files(self):
    return json.loads(self.files)

  def get_archives(self):
    return json.loads(self.archives)

  def get_params(self):
    return json.loads(self.params)

  def get_prepares(self):
    return json.loads(self.prepares)


class Ssh(Action):
  PARAM_FIELDS = ('user', 'host', 'command', 'params')
  node_type = 'ssh'

  user = models.CharField(max_length=64, verbose_name=_t('User'),
                          help_text=_t('User executing the shell command.'))
  host = models.CharField(max_length=256, verbose_name=_t('Host'),
                          help_text=_t('Where the shell will be executed.'))
  command = models.CharField(max_length=256, verbose_name=_t('%(type)s command') % {'type': node_type.title()},
                             help_text=_t('The command that will be executed.'))
  params = models.TextField(default="[]", verbose_name=_t('Arguments'),
                            help_text=_t('The arguments of the %(type)s command.') % {'type': node_type.title()})
  capture_output = models.BooleanField(default=False, verbose_name=_t('Capture output'),
                                       help_text=_t('Capture output of the stdout of the %(program)s command execution. The %(program)s '
                                                    'command output must be in Java properties file format and it must not exceed 2KB. '
                                                    'From within the workflow definition, the output of an %(program)s action node is accessible '
                                                    'via the String action:output(String node, String key) function.') % {'program': node_type.title()})

  def get_params(self):
    return json.loads(self.params)


class Shell(Action):
  PARAM_FIELDS = ('files', 'archives', 'job_properties', 'params', 'prepares')
  node_type = 'shell'

  command = models.CharField(max_length=256, blank=False, verbose_name=_t('%(type)s command') % {'type': node_type.title()},
                             help_text=_t('The path of the Shell command to execute.'))
  params = models.TextField(default="[]", verbose_name=_t('Arguments'),
                            help_text=_t('The arguments of the Shell command can be specified using one or more argument elements.'))
  files = models.TextField(default="[]", verbose_name=_t('Files'),
                           help_text=_t('List of names or paths of files to be added to the distributed cache and the task running directory.'))
  archives = models.TextField(default="[]", verbose_name=_t('Archives'),
                              help_text=_t('List of names or paths of the archives to be added to the distributed cache.'))
  job_properties = models.TextField(default='[]', verbose_name=_t('Hadoop job properties'),
                                    help_text=_t('For the job configuration (e.g. mapred.job.queue.name=production).'))
  prepares = models.TextField(default="[]", verbose_name=_t('Prepares'),
                              help_text=_t('List of absolute paths to delete and then to create before starting the application. '
                                           'This should be used exclusively for directory cleanup.'))
  job_xml = models.CharField(max_length=PATH_MAX, default='', blank=True, verbose_name=_t('Job XML'),
                             help_text=_t('Refer to a Hadoop JobConf job.xml file bundled in the workflow deployment directory. '
                                          'Properties specified in the Job Properties element override properties specified in the '
                                          'files specified in the Job XML element.'))
  capture_output = models.BooleanField(default=False, verbose_name=_t('Capture output'),
                                       help_text=_t('Capture output of the stdout of the %(program)s command execution. The %(program)s '
                                                    'command output must be in Java properties file format and it must not exceed 2KB. '
                                                    'From within the workflow definition, the output of an %(program)s action node is accessible '
                                                    'via the String action:output(String node, String key) function.') % {'program': node_type.title()})

  def get_properties(self):
    return json.loads(self.job_properties)

  def get_files(self):
    return json.loads(self.files)

  def get_archives(self):
    return json.loads(self.archives)

  def get_params(self):
    return json.loads(self.params)

  def get_prepares(self):
    return json.loads(self.prepares)


class DistCp(Action):
  PARAM_FIELDS = ('job_properties', 'params', 'prepares')
  node_type = 'distcp'

  params = models.TextField(default="[]", verbose_name=_t('Arguments'),
                            help_text=_t('The arguments of the %(type)s command. Put options first, then source paths, then destination path.')
                            % {'type': node_type.title()})
  job_properties = models.TextField(default='[]', verbose_name=_t('Hadoop job properties'),
                                    help_text=_t('For the job configuration (e.g. mapred.job.queue.name=production).'))
  prepares = models.TextField(default="[]", verbose_name=_t('Prepares'),
                              help_text=_t('List of absolute paths to delete and then to create before starting the application. '
                                           'This should be used exclusively for directory cleanup.'))
  job_xml = models.CharField(max_length=PATH_MAX, default='', blank=True, verbose_name=_t('Job XML'),
                             help_text=_t('Refer to a Hadoop JobConf job.xml file bundled in the workflow deployment directory. '
                                          'Properties specified in the Job Properties element override properties specified in the '
                                          'files specified in the Job XML element.'))

  def get_properties(self):
    return json.loads(self.job_properties)

  def get_params(self):
    return json.loads(self.params)

  def get_prepares(self):
    return json.loads(self.prepares)


class Fs(Action):
  PARAM_FIELDS = ('deletes', 'mkdirs', 'moves', 'chmods', 'touchzs')
  node_type = 'fs'

  deletes = models.TextField(default="[]", verbose_name=_t('Delete path'), blank=True,
                             help_text=_t('Delete the specified path. If it is a directory, it recursively deletes all its content and '
                                          'then deletes the directory.'))
  mkdirs = models.TextField(default="[]", verbose_name=_t('Create directory'), blank=True,
                            help_text=_t('Create the specified directory. It creates all missing directories in the path. '
                                         'If the directory already exists, it does a no-op.'))
  moves = models.TextField(default="[]", verbose_name=_t('Move file'), blank=True,
                           help_text=_t('Move a file or directory to another path.'))
  chmods = models.TextField(default="[]", verbose_name=_t('Change permissions'), blank=True,
                            help_text=_t('Change the permissions for the specified path. Permissions can be specified using the Unix symbolic '
                                         'representation (e.g. -rwxrw-rw-) or an octal representation (755).'))
  touchzs = models.TextField(default="[]", verbose_name=_t('Create or touch a file'), blank=True,
                             help_text=_t('Creates a zero-length file in the specified path if none exists, or touches it.'))

  def get_deletes(self):
    return json.loads(self.deletes)

  def get_mkdirs(self):
    return json.loads(self.mkdirs)

  def get_moves(self):
    return json.loads(self.moves)

  def get_chmods(self):
    return json.loads(self.chmods)

  def get_touchzs(self):
    return json.loads(self.touchzs)


class Email(Action):
  PARAM_FIELDS = ('to', 'cc', 'subject', 'body')
  node_type = 'email'

  to = models.TextField(default='', verbose_name=_t('to addresses'),
                        help_text=_t('Comma-separated values.'))
  cc = models.TextField(default='', verbose_name=_t('cc addresses (optional)'), blank=True,
                        help_text=_t('Comma-separated values.'))
  subject = models.TextField(default='', verbose_name=_t('Subject'), blank=True,
                             help_text=_t('Plain-text.'))
  body = models.TextField(default='', verbose_name=_t('Body'), blank=True,
                          help_text=_t('Plain-text.'))


Action.types = (Mapreduce.node_type, Streaming.node_type, Java.node_type, Pig.node_type, Hive.node_type, Sqoop.node_type, Ssh.node_type, Shell.node_type,
                DistCp.node_type, Fs.node_type, Email.node_type)


class ControlFlow(Node):
  """
  http://incubator.apache.org/oozie/docs/3.2.0-incubating/docs/WorkflowFunctionalSpec.html#a3.1_Control_Flow_Nodes
  """
  class Meta:
    abstract = True

  def get_xml(self):
    return django_mako.render_to_string(self.get_template_name(), {})

  def is_visible(self):
    return False


# Could not make this abstract
class Start(ControlFlow):
  node_type = 'start'


class End(ControlFlow):
  node_type = 'end'


class Kill(ControlFlow):
  node_type = 'kill'

  message = models.CharField(max_length=256, blank=False, default='Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]')


class Fork(ControlFlow):
  """
  A Fork can be converted into a Decision node.
  """
  node_type = 'fork'

  def is_visible(self):
    return True

  def get_child_join(self):
    return Link.objects.get(parent=self, name='related').child.get_full_node()

  def convert_to_decision(self):
    self.remove_join()

    decision = Decision.objects.create(workflow=self.workflow, node_type=Decision.node_type)
    decision.save()

    links = self.get_all_children_links()
    has_default = False
    for link in links:
      if link.name == 'default':
        has_default = True
      link.parent = decision
      link.save()

    # Defaults to the end
    if not has_default:
      link = Link.objects.create(name="default", parent=decision, child=self.workflow.end)
      link.save()

    self.delete()

    return decision

  def remove_join(self):
    join = self.get_child_join()
    after_join = join.get_child('to')

    for parent in join.get_parent_actions():
      link = parent.get_link('ok')
      link.child = after_join
      link.save()

    # Links are deleted automatically through the foreign keys
    join.delete()


class Decision(ControlFlow):
  """
  Essentially a fork where the end is not a join, but another node.

  If two decisions share an end, the decision with the higher level takes the end
  and the lower-level decision will not have an end, e.g.:

      D
     D N
     E

  The first 'D' will be assigned the end 'E'.
  The second 'D' will not have an end.
  This enables easier interpretation of the visual hierarchy.
  """
  node_type = 'decision'

  def get_child_end_or_none(self):
    try:
      return Link.objects.get(parent=self, name='related').child.get_full_node()
    except Link.DoesNotExist:
      return None

  def is_visible(self):
    return True

  def update_description(self):
    self.description = ', '.join(self.get_children_links().values_list('comment', flat=True))
    self.save()


class Join(ControlFlow):
  node_type = 'join'

  def is_visible(self):
    return True

  def get_parent_fork(self):
    return self.get_parent_link('related').parent.get_full_node()

  def get_parent_actions(self):
    return [link.parent for link in self.get_parent_links()]


FREQUENCY_UNITS = (('minutes', _('Minutes')),
                   ('hours', _('Hours')),
                   ('days', _('Days')),
                   ('months', _('Months')))
FREQUENCY_NUMBERS = [(i, i) for i in xrange(1, 61)]
DATASET_FREQUENCY = ['MINUTE', 'HOUR', 'DAY', 'MONTH', 'YEAR']


class Coordinator(Job):
  """
  http://incubator.apache.org/oozie/docs/3.2.0-incubating/docs/CoordinatorFunctionalSpec.html
  """
  frequency_number = models.SmallIntegerField(default=1, choices=FREQUENCY_NUMBERS, verbose_name=_t('Frequency number'),
                                              help_text=_t('The number of units of the rate at which '
                                                           'data is periodically created.'))
  frequency_unit = models.CharField(max_length=20, choices=FREQUENCY_UNITS, default='days', verbose_name=_t('Frequency unit'),
                                    help_text=_t('The unit of the rate at which data is periodically created.'))
  timezone = models.CharField(max_length=24, choices=TIMEZONES, default='America/Los_Angeles', verbose_name=_t('Timezone'),
                              help_text=_t('The timezone of the coordinator.'))
  # Callables, so the defaults are evaluated at save time rather than once at import time
  start = models.DateTimeField(default=datetime.today, verbose_name=_t('Start'),
                               help_text=_t('When to start the first workflow.'))
  end = models.DateTimeField(default=lambda: datetime.today() + timedelta(days=3), verbose_name=_t('End'),
                             help_text=_t('When to start the last workflow.'))
  workflow = models.ForeignKey(Workflow, null=True, verbose_name=_t('Workflow'),
                               help_text=_t('The workflow to schedule repeatedly.'))
  timeout = models.SmallIntegerField(null=True, blank=True, verbose_name=_t('Timeout'),
                                     help_text=_t('How long in minutes the coordinator action will be in '
                                                  'WAITING or READY status before giving up on its execution.'))
  concurrency = models.PositiveSmallIntegerField(null=True, blank=True, choices=FREQUENCY_NUMBERS, verbose_name=_t('Concurrency'),
                                                 help_text=_t('How many coordinator actions are allowed to run concurrently (RUNNING status) '
                                                              'before the coordinator engine starts throttling them.'))
  execution = models.CharField(max_length=10, null=True, blank=True, verbose_name=_t('Execution'),
                               choices=(('FIFO', _t('FIFO (oldest first) default')),
                                        ('LIFO', _t('LIFO (newest first)')),
                                        ('LAST ONLY', _t('LAST_ONLY (discards all older materializations)'))),
                               help_text=_t('Execution strategy of its coordinator actions when there is a backlog of coordinator '
                                            'actions in the coordinator engine. The different execution strategies are \'oldest first\', '
                                            '\'newest first\' and \'last one only\'. A backlog normally happens because of delayed '
                                            'input data, concurrency control or manual re-runs of coordinator jobs.'))
  throttle = models.PositiveSmallIntegerField(null=True, blank=True, choices=FREQUENCY_NUMBERS, verbose_name=_t('Throttle'),
                                              help_text=_t('The materialization or creation throttle value for its coordinator actions: '
                                                           'the maximum number of coordinator actions allowed to be in WAITING state concurrently.'))

  HUE_ID = 'hue-id-c'

  def get_type(self):
    return 'coordinator'

  def to_xml(self):
    tmpl = "editor/gen/coordinator.xml.mako"
    return re.sub(re.compile('\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {'coord': self}))

  def clone(self, new_owner=None):
    datasets = Dataset.objects.filter(coordinator=self)
    data_inputs = DataInput.objects.filter(coordinator=self)
    data_outputs = DataOutput.objects.filter(coordinator=self)

    copy = self
    copy.pk = None
    copy.id = None
    copy.name += '-copy'
    copy.deployment_dir = ''
    if new_owner is not None:
      copy.owner = new_owner
    copy.save()

    old_dataset_mapping = {}

    for dataset in datasets:
      prev_id = dataset.id
      dataset.pk = None
      dataset.id = None
      dataset.coordinator = copy
      dataset.save()
      old_dataset_mapping[prev_id] = dataset

    for data_input in data_inputs:
      data_input.pk = None
      data_input.id = None
      data_input.coordinator = copy
      data_input.dataset = old_dataset_mapping[data_input.dataset.id]
      data_input.save()

    for data_output in data_outputs:
      data_output.pk = None
      data_output.id = None
      data_output.coordinator = copy
      data_output.dataset = old_dataset_mapping[data_output.dataset.id]
      data_output.save()

    return copy

  @classmethod
  def get_application_path_key(cls):
    return 'oozie.coord.application.path'

  @classmethod
  def get_application_filename(cls):
    return 'coordinator.xml'

  @property
  def start_utc(self):
    return utc_datetime_format(self.start)

  @property
  def end_utc(self):
    return utc_datetime_format(self.end)

  def get_absolute_url(self):
    return reverse('oozie:edit_coordinator', kwargs={'coordinator': self.id})

  @property
  def frequency(self):
    return '${coord:%(unit)s(%(number)d)}' % {'unit': self.frequency_unit, 'number': self.frequency_number}

  @property
  def text_frequency(self):
    return '%(number)d %(unit)s' % {'unit': self.frequency_unit, 'number': self.frequency_number}
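
  # Example of the two renderings above, assuming frequency_number=3 and
  # frequency_unit='days':
  #   coord.frequency      == '${coord:days(3)}'  (Oozie EL expression)
  #   coord.text_frequency == '3 days'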

  def find_parameters(self):
    params = self.workflow.find_parameters()

    for dataset in self.dataset_set.all():
      for param in find_parameters(dataset, ['uri']):
        if param not in set(DATASET_FREQUENCY):
          params[param] = ''

    for ds in self.datainput_set.all():
      params.pop(ds.name, None)

    for ds in self.dataoutput_set.all():
      params.pop(ds.name, None)

    return params


def utc_datetime_format(utc_time):
  return utc_time.strftime("%Y-%m-%dT%H:%MZ")
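
# Example: utc_datetime_format(datetime(2012, 7, 4, 8, 30)) == '2012-07-04T08:30Z'.
# Seconds are dropped: Oozie expects minute-resolution UTC timestamps
# (yyyy-MM-dd'T'HH:mm'Z').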


class DatasetManager(models.Manager):
  def is_accessible_or_exception(self, request, dataset_id):
    if dataset_id is None:
      return
    try:
      dataset = Dataset.objects.get(pk=dataset_id)
      if dataset.coordinator.is_accessible(request.user):
        return dataset
      else:
        message = _("Permission denied. %(username)s does not have the permissions to access dataset %(id)s") % \
            {'username': request.user.username, 'id': dataset.id}
        access_warn(request, message)
        request.error(message)
        raise PopupException(message)
    except Dataset.DoesNotExist:
      raise PopupException(_('Dataset %(id)s does not exist') % {'id': dataset_id})


class Dataset(models.Model):
  name = models.CharField(max_length=40, validators=[name_validator], verbose_name=_t('Name'),
                          help_text=_t('The name of the dataset.'))
  description = models.CharField(max_length=1024, blank=True, default='', verbose_name=_t('Description'),
                                 help_text=_t('A description of the dataset.'))
  start = models.DateTimeField(default=datetime.today, verbose_name=_t('Start'),
                               help_text=_t('The UTC datetime of the initial instance of the dataset. The initial instance also provides '
                                            'the baseline datetime to compute instances of the dataset using multiples of the frequency.'))
  frequency_number = models.SmallIntegerField(default=1, choices=FREQUENCY_NUMBERS, verbose_name=_t('Frequency number'),
                                              help_text=_t('The number of units of the rate at which '
                                                           'data is periodically created.'))
  frequency_unit = models.CharField(max_length=20, choices=FREQUENCY_UNITS, default='days', verbose_name=_t('Frequency unit'),
                                    help_text=_t('The unit of the rate at which data is periodically created.'))
  uri = models.CharField(max_length=1024, default='/data/${YEAR}${MONTH}${DAY}', verbose_name=_t('URI'),
                         help_text=_t('The URI template that identifies the dataset and can be resolved into concrete URIs to identify a particular '
                                      'dataset instance. The URI consists of constants (e.g. ${YEAR}/${MONTH}) and '
                                      'configuration properties (e.g. /home/${USER}/projects/${PROJECT}).'))
  timezone = models.CharField(max_length=24, choices=TIMEZONES, default='America/Los_Angeles', verbose_name=_t('Timezone'),
                              help_text=_t('The timezone of the dataset.'))
  done_flag = models.CharField(max_length=64, blank=True, default='', verbose_name=_t('Done flag'),
                               help_text=_t('The done file for the dataset. If the done flag is not specified, then Oozie '
                                            'configures Hadoop to create a _SUCCESS file in the output directory. If the done '
                                            'flag is set to empty, then Coordinator looks for the existence of the directory itself.'))
  coordinator = models.ForeignKey(Coordinator, verbose_name=_t('Coordinator'),
                                  help_text=_t('The coordinator associated with this data.'))

  objects = DatasetManager()
  unique_together = ('coordinator', 'name')

  def __unicode__(self):
    return '%s' % (self.name,)

  @property
  def start_utc(self):
    return utc_datetime_format(self.start)

  @property
  def frequency(self):
    return '${coord:%(unit)s(%(number)d)}' % {'unit': self.frequency_unit, 'number': self.frequency_number}

  @property
  def text_frequency(self):
    return '%(number)d %(unit)s' % {'unit': self.frequency_unit, 'number': self.frequency_number}


class DataInput(models.Model):
  name = models.CharField(max_length=40, validators=[name_validator], verbose_name=_t('Name of an input variable in the workflow'),
                          help_text=_t('The name of the workflow variable to fill in automatically.'))
  dataset = models.OneToOneField(Dataset, verbose_name=_t('The dataset representing the format of the data input'),
                                 help_text=_t('The pattern of the input data we want to process.'))
  coordinator = models.ForeignKey(Coordinator)

  unique_together = ('coordinator', 'name')


class DataOutput(models.Model):
  name = models.CharField(max_length=40, validators=[name_validator], verbose_name=_t('Name of an output variable in the workflow'),
                          help_text=_t('The name of the workflow variable to fill in automatically.'))
  dataset = models.OneToOneField(Dataset, verbose_name=_t('The dataset representing the format of the data output'),
                                 help_text=_t('The pattern of the output data we want to generate.'))
  coordinator = models.ForeignKey(Coordinator)

  unique_together = ('coordinator', 'name')


class HistoryManager(models.Manager):
  def create_from_submission(self, submission):
    History.objects.create(submitter=submission.user,
                           oozie_job_id=submission.oozie_id,
                           job=submission.job,
                           properties=json.dumps(submission.properties))


class History(models.Model):
  """
  Contains information on submitted workflows/coordinators.
  """
  submitter = models.ForeignKey(User, db_index=True)
  submission_date = models.DateTimeField(auto_now=True, db_index=True)
  oozie_job_id = models.CharField(max_length=128)
  job = models.ForeignKey(Job, db_index=True)
  properties = models.TextField()

  objects = HistoryManager()

  @property
  def properties_dict(self):
    return json.loads(self.properties)

  def get_absolute_oozie_url(self):
    view = 'oozie:list_oozie_workflow'

    if self.oozie_job_id.endswith('C'):
      view = 'oozie:list_oozie_coordinator'

    return reverse(view, kwargs={'job_id': self.oozie_job_id})

  def get_workflow(self):
    if self.oozie_job_id.endswith('W'):
      return self.job

  def get_coordinator(self):
    if self.oozie_job_id.endswith('C'):
      return self.job

  @classmethod
  def get_workflow_from_config(cls, conf_dict):
    try:
      return Workflow.objects.get(id=conf_dict.get(Workflow.HUE_ID))
    except Workflow.DoesNotExist:
      pass

  @classmethod
  def get_coordinator_from_config(cls, conf_dict):
    try:
      return Coordinator.objects.get(id=conf_dict.get(Coordinator.HUE_ID))
    except Coordinator.DoesNotExist:
      pass

  @classmethod
  def cross_reference_submission_history(cls, user, oozie_id, coordinator_job_id):
    # Try to get the history
    history = None
    try:
      history = History.objects.get(oozie_job_id=oozie_id)
      if history.job.owner != user:
        history = None
    except History.DoesNotExist:
      pass

    return history


def find_parameters(instance, fields=None):
  """Find ${braced} parameters in the given fields of a model instance."""
  if fields is None:
    fields = [field.name for field in instance._meta.fields]

  params = []
  for field in fields:
    data = getattr(instance, field)
    if isinstance(data, basestring):
      for match in Template.pattern.finditer(data):
        name = match.group('braced')
        if name is not None:
          params.append(name)

  return params
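
# Example: with string.Template's default pattern, only ${braced} references are
# collected. A field holding '/data/${YEAR}/${MONTH}' contributes
# ['YEAR', 'MONTH'], while a bare '$YEAR' matches the pattern's 'named' group
# instead and is ignored here.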

# See http://wiki.apache.org/hadoop/JobConfFile
_STD_PROPERTIES = [
  'mapred.input.dir',
  'mapred.output.dir',
  'mapred.job.name',
  'mapred.job.queue.name',
  'mapred.mapper.class',
  'mapred.reducer.class',
  'mapred.combiner.class',
  'mapred.partitioner.class',
  'mapred.map.tasks',
  'mapred.reduce.tasks',
  'mapred.input.format.class',
  'mapred.output.format.class',
  'mapred.input.key.class',
  'mapred.input.value.class',
  'mapred.output.key.class',
  'mapred.output.value.class',
  'mapred.mapoutput.key.class',
  'mapred.mapoutput.value.class',
  'mapred.combine.buffer.size',
  'mapred.min.split.size',
  'mapred.speculative.execution',
  'mapred.map.tasks.speculative.execution',
  'mapred.reduce.tasks.speculative.execution',
  'mapred.queue.default.acl-administer-jobs',
]

_STD_PROPERTIES_JSON = json.dumps(_STD_PROPERTIES)

ACTION_TYPES = {
  Mapreduce.node_type: Mapreduce,
  Streaming.node_type: Streaming,
  Java.node_type: Java,
  Pig.node_type: Pig,
  Hive.node_type: Hive,
  Sqoop.node_type: Sqoop,
  Ssh.node_type: Ssh,
  Shell.node_type: Shell,
  DistCp.node_type: DistCp,
  Fs.node_type: Fs,
  Email.node_type: Email,
}

NODE_TYPES = ACTION_TYPES.copy()
NODE_TYPES.update({
  Fork.node_type: Fork,
  Join.node_type: Join,
  Decision.node_type: Decision,
  Start.node_type: Start,
  End.node_type: End,
})
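
# Usage sketch: these registries map a serialized node_type string back to its
# model class, e.g. NODE_TYPES['fork'] is Fork and ACTION_TYPES['pig'] is Pig.
# ACTION_TYPES covers only action nodes; the control-flow types registered
# above appear only in NODE_TYPES.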