#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import logging
import re
import time
import uuid

from datetime import datetime, timedelta
from dateutil.parser import parse
from string import Template

from django.core.urlresolvers import reverse
from django.db.models import Q
from django.utils.encoding import force_unicode
from django.utils.translation import ugettext as _
from django.contrib.auth.models import User

from desktop.conf import USE_DEFAULT_CONFIGURATION
from desktop.lib import django_mako
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.i18n import smart_str
from desktop.lib.json_utils import JSONEncoderForHTML
from desktop.models import DefaultConfiguration, Document2, Document

from hadoop.fs.hadoopfs import Hdfs
from hadoop.fs.exceptions import WebHdfsException

from liboozie.oozie_api import get_oozie
from liboozie.submission2 import Submission, create_directories

from notebook.models import Notebook

from oozie.conf import REMOTE_SAMPLE_DIR
from oozie.utils import utc_datetime_format, UTC_TIME_FORMAT, convert_to_server_timezone
from oozie.importlib.workflows import generate_v2_graph_nodes, MalformedWfDefException, InvalidTagWithNamespaceException


LOG = logging.getLogger(__name__)


class Job(object):

  def find_all_parameters(self, with_lib_path=True):
    params = self.find_parameters()

    for param in self.parameters:
      params[param['name'].strip()] = param['value']

    if params.get('nominal_time') == '':
      params['nominal_time'] = datetime.today().strftime(UTC_TIME_FORMAT)

    return [{'name': name, 'value': value} for name, value in params.iteritems() if with_lib_path or name != 'oozie.use.system.libpath']

  @classmethod
  def get_workspace(cls, user):
    if not isinstance(user, basestring):
      user = user.username
    return (REMOTE_SAMPLE_DIR.get() + '/hue-oozie-$TIME').replace('$USER', user).replace('$TIME', str(time.time()))
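
  # Illustrative sketch only (not part of the original code): assuming the
  # default REMOTE_SAMPLE_DIR is '/user/hue/oozie/workspaces', as suggested by
  # the hard-coded example in gen_workflow_data_from_xml() below, this returns
  # paths such as '/user/hue/oozie/workspaces/hue-oozie-1452553957.19', where
  # the suffix is the current epoch time from time.time().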

  @property
  def validated_name(self):
    good_name = []

    for c in self.name[:40]:
      if not good_name:
        if not re.match('[a-zA-Z_\{\$\}]', c):
          c = '_'
      else:
        if not re.match('[\-_a-zA-Z0-9\{\$\}]', c):
          c = '_'
      good_name.append(c)

    return ''.join(good_name)
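
  # Illustrative example, derived from the two regexes above: the first
  # character must be in [a-zA-Z_{$}], later characters in [-_a-zA-Z0-9{$}],
  # anything else becomes '_', and the name is capped at 40 characters, e.g.
  #   '1st job!'  ->  '_st_job_'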

  def __str__(self):
    return '%s' % force_unicode(self.name)

  def deployment_dir(self):
    return None

  def check_workspace(self, fs, user):
    # Create optional default root workspace for the first submission
    if REMOTE_SAMPLE_DIR.get() == REMOTE_SAMPLE_DIR.config.default_value:
      create_directories(fs, [REMOTE_SAMPLE_DIR.get()])

    Submission(user, self, fs, None, {})._create_dir(self.deployment_dir)
    Submission(user, self, fs, None, {})._create_dir(Hdfs.join(self.deployment_dir, 'lib'))

  def import_workspace(self, fs, source_deployment_dir, owner):
    try:
      fs.copy_remote_dir(source_deployment_dir, self.deployment_dir, owner=owner)
    except WebHdfsException, e:
      msg = _('The copy of the deployment directory failed: %s.') % e
      LOG.error(msg)
      raise PopupException(msg)


class WorkflowConfiguration(object):
  APP_NAME = 'oozie-workflow'

  SLA_DEFAULT = [
    {'key': 'enabled', 'value': False},  # Always first element
    {'key': 'nominal-time', 'value': '${nominal_time}'},
    {'key': 'should-start', 'value': ''},
    {'key': 'should-end', 'value': '${30 * MINUTES}'},
    {'key': 'max-duration', 'value': ''},
    {'key': 'alert-events', 'value': ''},
    {'key': 'alert-contact', 'value': ''},
    {'key': 'notification-msg', 'value': ''},
    {'key': 'upstream-apps', 'value': ''},
  ]

  PROPERTIES = [
    {
      "multiple": True,
      "defaultValue": [
        {
          'name': 'oozie.use.system.libpath',
          'value': True
        }
      ],
      "value": [
        {
          'name': 'oozie.use.system.libpath',
          'value': True
        }
      ],
      "nice_name": _("Variables"),
      "key": "parameters",
      "help_text": _("Add one or more Oozie workflow job parameters."),
      "type": "parameters"
    }, {
      "multiple": False,
      "defaultValue": '',
      "value": '',
      "nice_name": _("Workspace"),
      "key": "deployment_dir",
      "help_text": _("Specify the deployment directory."),
      "type": "hdfs-file"
    }, {
      "multiple": True,
      "defaultValue": [],
      "value": [],
      "nice_name": _("Hadoop Properties"),
      "key": "properties",
      "help_text": _("Hadoop configuration properties."),
      "type": "settings"
    }, {
      "multiple": False,
      "defaultValue": True,
      "value": True,
      "nice_name": _("Show graph arrows"),
      "key": "show_arrows",
      "help_text": _("Toggles display of graph arrows."),
      "type": "boolean"
    }, {
      "multiple": False,
      "defaultValue": "uri:oozie:workflow:0.5",
      "value": "uri:oozie:workflow:0.5",
      "nice_name": _("Version"),
      "key": "schema_version",
      "help_text": _("Oozie XML Schema Version"),
      "type": "string",
      "options": [
        "uri:oozie:workflow:0.5",
        "uri:oozie:workflow:0.4.5",
        "uri:oozie:workflow:0.4",
      ]
    }, {
      "multiple": False,
      "defaultValue": '',
      "value": '',
      "nice_name": _("Job XML"),
      "key": "job_xml",
      "help_text": _("Oozie Job XML file"),
      "type": "hdfs-file"
    }, {
      "multiple": False,
      "defaultValue": False,
      "value": False,
      "nice_name": _("SLA Enabled"),
      "key": "sla_enabled",
      "help_text": _("SLA Enabled"),
      "type": "boolean"
    }, {
      "multiple": False,
      "defaultValue": SLA_DEFAULT,
      "value": SLA_DEFAULT,
      "nice_name": _("SLA Configuration"),
      "key": "sla",
      "help_text": _("Oozie SLA properties"),
      "type": "settings",
      "options": [prop['key'] for prop in SLA_DEFAULT]
    }
  ]


class Workflow(Job):
  XML_FILE_NAME = 'workflow.xml'
  PROPERTY_APP_PATH = 'oozie.wf.application.path'
  HUE_ID = 'hue-id-w'

  def __init__(self, data=None, document=None, workflow=None, user=None):
    self.document = document
    self.user = user

    if document is not None:
      self.data = document.data
    elif data is not None:
      self.data = data
    else:
      if not workflow:
        workflow = self.get_default_workflow()

      workflow['properties'] = self.get_workflow_properties_for_user(user, workflow)

      self.data = json.dumps({
        'layout': [{
          "size": 12, "rows": [
            {"widgets": [{"size": 12, "name": "Start", "id": "3f107997-04cc-8733-60a9-a4bb62cebffc", "widgetType": "start-widget", "properties": {}, "offset": 0, "isLoading": False, "klass": "card card-widget span12"}]},
            {"widgets": [{"size": 12, "name": "End", "id": "33430f0f-ebfa-c3ec-f237-3e77efa03d0a", "widgetType": "end-widget", "properties": {}, "offset": 0, "isLoading": False, "klass": "card card-widget span12"}]},
            {"widgets": [{"size": 12, "name": "Kill", "id": "17c9c895-5a16-7443-bb81-f34b30b21548", "widgetType": "kill-widget", "properties": {}, "offset": 0, "isLoading": False, "klass": "card card-widget span12"}]}
          ],
          "drops": ["temp"],
          "klass": "card card-home card-column span12"
        }],
        'workflow': workflow
      })

  @classmethod
  def get_application_path_key(cls):
    return 'oozie.wf.application.path'

  @classmethod
  def gen_workflow_data_from_xml(cls, user, oozie_workflow):
    node_list = []
    try:
      node_list = generate_v2_graph_nodes(oozie_workflow.definition)
    except MalformedWfDefException, e:
      LOG.exception("Could not find any nodes in Workflow definition. Maybe it's malformed?")
    except InvalidTagWithNamespaceException, e:
      LOG.exception(
        "Tag with namespace %(namespace)s is not valid. Please use one of the following namespaces: %(namespaces)s" % {
          'namespace': e.namespace,
          'namespaces': e.namespaces
        })

    _to_lowercase(node_list)
    adj_list = _create_graph_adjaceny_list(node_list)

    node_hierarchy = ['start']
    _get_hierarchy_from_adj_list(adj_list, adj_list['start']['ok_to'], node_hierarchy)

    _update_adj_list(adj_list)

    wf_rows = _create_workflow_layout(node_hierarchy, adj_list)

    data = {'layout': [{}], 'workflow': {}}
    if wf_rows:
      data['layout'][0]['rows'] = wf_rows

    wf_nodes = []
    _dig_nodes(node_hierarchy, adj_list, user, wf_nodes)
    data['workflow']['nodes'] = wf_nodes

    data['workflow']['id'] = '123'
    data['workflow']['properties'] = cls.get_workflow_properties_for_user(user, workflow=None)
    data['workflow']['properties'].update({
      'deployment_dir': '/user/hue/oozie/workspaces/hue-oozie-1452553957.19'
    })

    return data
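
  # Note: the conversion above is a pipeline over the parsed node list:
  # generate_v2_graph_nodes() -> _to_lowercase() -> _create_graph_adjaceny_list()
  # -> _get_hierarchy_from_adj_list() -> _update_adj_list() ->
  # _create_workflow_layout() and _dig_nodes(). It produces the same
  # {'layout': [...], 'workflow': {...}} shape that __init__() builds for a
  # brand-new workflow.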

  @classmethod
  def get_default_workflow(cls):
    return {
      "id": None,
      "uuid": None,
      "name": "My Workflow",
      "nodes": [
        {"id": "3f107997-04cc-8733-60a9-a4bb62cebffc", "name": "Start", "type": "start-widget", "properties": {},
         "children": [{'to': '33430f0f-ebfa-c3ec-f237-3e77efa03d0a'}]},
        {"id": "33430f0f-ebfa-c3ec-f237-3e77efa03d0a", "name": "End", "type": "end-widget", "properties": {},
         "children": []},
        {"id": "17c9c895-5a16-7443-bb81-f34b30b21548", "name": "Kill", "type": "kill-widget",
         "properties": {'message': _('Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]')},
         "children": []}
      ]
    }

  @classmethod
  def get_workflow_properties_for_user(cls, user, workflow=None):
    workflow = workflow if workflow is not None else {}
    properties = workflow.get('properties', None)

    if not properties:
      config = None
      properties = cls.get_properties()

      if user is not None:
        if USE_DEFAULT_CONFIGURATION.get():
          config = DefaultConfiguration.objects.get_configuration_for_user(app=WorkflowConfiguration.APP_NAME, user=user)

      if config is not None:
        properties.update(config.properties_dict)

      properties.update({
        'wf1_id': None,
        'description': ''
      })

    return properties

  @staticmethod
  def get_properties():
    return dict((prop['key'], prop['value']) for prop in WorkflowConfiguration.PROPERTIES)
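
  # A sketch of the dict returned by get_properties(), flattened from
  # WorkflowConfiguration.PROPERTIES above:
  #   {
  #     'parameters': [{'name': 'oozie.use.system.libpath', 'value': True}],
  #     'deployment_dir': '',
  #     'properties': [],
  #     'show_arrows': True,
  #     'schema_version': 'uri:oozie:workflow:0.5',
  #     'job_xml': '',
  #     'sla_enabled': False,
  #     'sla': WorkflowConfiguration.SLA_DEFAULT
  #   }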

  @property
  def id(self):
    return self.document.id

  @property
  def uuid(self):
    return self.document.uuid

  @property
  def name(self):
    _data = self.get_data()
    return _data['workflow']['name']

  @property
  def deployment_dir(self):
    _data = self.get_data()
    return _data['workflow']['properties']['deployment_dir']

  @property
  def parameters(self):
    _data = self.get_data()
    return _data['workflow']['properties']['parameters']

  @property
  def sla_enabled(self):
    _data = self.get_data()
    return _data['workflow']['properties']['sla'][0].get('value')

  @property
  def has_some_slas(self):
    return self.sla_enabled or any([node.sla_enabled for node in self.nodes])

  @property
  def credentials(self):
    return list(set([cred for node in self.nodes for cred in node.data['properties']['credentials']]))

  @property
  def sla(self):
    _data = self.get_data()
    return _data['workflow']['properties']['sla']

  @property
  def nodes(self):
    _data = self.get_data()
    return [Node(node, self.user) for node in _data['workflow']['nodes']]

  def find_parameters(self):
    params = set()

    for param in find_dollar_braced_variables(self.name):
      params.add(param)

    if self.sla_enabled:
      for param in find_json_parameters(self.sla):
        params.add(param)

    for node in self.nodes:
      params.update(node.find_parameters())

    return dict([(param, '') for param in list(params)])
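
  # A sketch of the result (assuming find_dollar_braced_variables() yields the
  # bare variable names): a workflow named 'wf-${env}' with SLA disabled and no
  # node parameters would return {'env': ''} -- every discovered parameter
  # mapped to an empty default value.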

  def get_json(self):
    _data = self.get_data()
    return json.dumps(_data)

  def get_data(self):
    _data = json.loads(self.data)

    if self.document is not None:
      _data['workflow']['id'] = self.document.id
      _data['workflow']['dependencies'] = list(self.document.dependencies.values('uuid',))
    else:
      _data['workflow']['dependencies'] = []

    if 'parameters' not in _data['workflow']['properties']:
      _data['workflow']['properties']['parameters'] = [
        {'name': 'oozie.use.system.libpath', 'value': True},
      ]

    if 'show_arrows' not in _data['workflow']['properties']:
      _data['workflow']['properties']['show_arrows'] = True

    for node in _data['workflow']['nodes']:
      if 'credentials' in node['properties']:  # If node is an Action
        if 'retry_max' not in node['properties']:  # When displaying a workflow
          node['properties']['retry_max'] = []
        if 'retry_interval' not in node['properties']:
          node['properties']['retry_interval'] = []

      # Backward compatibility
      _upgrade_older_node(node)

    return _data

  def to_xml(self, mapping=None):
    if mapping is None:
      mapping = {}

    tmpl = 'editor2/gen/workflow.xml.mako'
    data = self.get_data()

    nodes = ([node for node in self.nodes if node.name != 'End'] +
             [node for node in self.nodes if node.name == 'End'])  # End at the end
    node_mapping = dict([(node.id, node) for node in nodes])

    sub_wfs_ids = [node.data['properties']['workflow'] for node in nodes if node.data['type'] == 'subworkflow']
    workflow_mapping = dict(
      [(workflow.uuid, Workflow(document=workflow, user=self.user)) for workflow in Document2.objects.filter(uuid__in=sub_wfs_ids)])

    xml = re.sub(re.compile('>\s*\n+', re.MULTILINE), '>\n', django_mako.render_to_string(tmpl, {
      'wf': self,
      'workflow': data['workflow'],
      'nodes': nodes,
      'mapping': mapping,
      'node_mapping': node_mapping,
      'workflow_mapping': workflow_mapping
    }))

    return force_unicode(xml.strip())

  def get_absolute_url(self):
    return reverse('oozie:edit_workflow') + '?workflow=%s' % self.id

  def override_subworkflow_id(self, sub_wf_action, workflow_id):
    _data = self.get_data()

    action = [_action for _action in _data['workflow']['nodes'] if _action['id'] == sub_wf_action.id]
    if action:
      action[0]['properties']['job_properties'].append({'name': Workflow.HUE_ID, 'value': workflow_id})

    self.data = json.dumps(_data)

  def update_name(self, name):
    _data = self.get_data()
    _data['workflow']['name'] = name
    self.data = json.dumps(_data)

  def set_workspace(self, user):
    _data = json.loads(self.data)
    _data['workflow']['properties']['deployment_dir'] = Job.get_workspace(user)
    self.data = json.dumps(_data)

  def create_single_action_workflow_data(self, node_id):
    _data = json.loads(self.data)

    start_node = [node for node in _data['workflow']['nodes'] if node['name'] == 'Start'][0]
    submit_node = [node for node in _data['workflow']['nodes'] if node['id'] == node_id][0]
    end_node = [node for node in _data['workflow']['nodes'] if node['name'] == 'End'][0]
    kill_node = [node for node in _data['workflow']['nodes'] if node['name'] == 'Kill'][0]

    # Modify children to point Start -> Submit_node -> End/Kill
    start_node['children'] = [{'to': submit_node['id']}]
    submit_node['children'] = [{'to': end_node['id']}, {'error': kill_node['id']}]
    _data['workflow']['properties']['deployment_dir'] = None

    # Recursively find the widget node
    def _get_node(rows, node_id):
      for row in rows:
        if not row['widgets']:
          for col in row['columns']:
            node = _get_node(col['rows'], node_id)
            if node:
              return node
        elif row['widgets'][0]['id'] == node_id:
          return row

    # Create wf data with above nodes
    return json.dumps({
      'layout': [{
        "size": 12,
        "rows": [
          [row for row in _data['layout'][0]['rows'] if row['widgets'] and row['widgets'][0]['name'] == 'Start'][0],
          _get_node(_data['layout'][0]['rows'], node_id),
          [row for row in _data['layout'][0]['rows'] if row['widgets'] and row['widgets'][0]['name'] == 'End'][0],
          [row for row in _data['layout'][0]['rows'] if row['widgets'] and row['widgets'][0]['name'] == 'Kill'][0]
        ],
        "drops": ["temp"],
        "klass": "card card-home card-column span12"
      }],
      'workflow': {
        "id": None,
        "uuid": None,
        "name": _data['workflow']['name'],
        "properties": _data['workflow']['properties'],
        "nodes": [start_node, submit_node, end_node, kill_node]
      }
    })


# Lowercase the string values of each node in node_list, to avoid
# case-sensitive failures
def _to_lowercase(node_list):
  for node in node_list:
    for key in node.keys():
      if hasattr(node[key], 'lower'):
        node[key] = node[key].lower()
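
# Sketch: only the values are lowercased (anything with a .lower() method);
# the keys are left untouched, e.g.
#   {'name': 'Streaming-Job', 'node_type': 'MAP-REDUCE'}
#   -> {'name': 'streaming-job', 'node_type': 'map-reduce'}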


def _update_adj_list(adj_list):
  uuids = {}
  id = 1
  first_kill_node_seen = False

  for node in adj_list.keys():
    adj_list[node]['id'] = id

    # Oozie uses the same action for streaming and MapReduce, but Hue manages them differently
    if adj_list[node]['node_type'] == 'map-reduce':
      if 'streaming' in adj_list[node]['name']:
        adj_list[node]['node_type'] = 'streaming'
      else:
        adj_list[node]['node_type'] = 'mapreduce'
    elif adj_list[node]['node_type'] == 'sub-workflow':
      adj_list[node]['node_type'] = 'subworkflow'

    if adj_list[node]['node_type'] == 'kill':
      # The JS side requires at least one of the kill nodes to have this id
      if not first_kill_node_seen:
        adj_list[node]['uuid'] = '17c9c895-5a16-7443-bb81-f34b30b21548'
        first_kill_node_seen = True
      else:
        adj_list[node]['uuid'] = str(uuid.uuid4())
    elif adj_list[node]['node_type'] == 'start':
      adj_list[node]['uuid'] = '3f107997-04cc-8733-60a9-a4bb62cebffc'
    elif adj_list[node]['node_type'] == 'end':
      adj_list[node]['uuid'] = '33430f0f-ebfa-c3ec-f237-3e77efa03d0a'
    else:
      adj_list[node]['uuid'] = node[-4:] + str(uuid.uuid4())[4:]

    uuids[id] = adj_list[node]['uuid']
    id += 1

  return adj_list
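
# Note: the fixed UUIDs assigned above for the start, end and first kill nodes
# are the same well-known widget ids used by the default layout in
# Workflow.__init__(), so an imported graph wires up to the standard
# Start/End/Kill widgets on the JS side.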


def _dig_nodes(nodes, adj_list, user, wf_nodes):
  for node in nodes:
    if type(node) != list:
      node = adj_list[node]
      properties = {}
      if '%s-widget' % node['node_type'] in NODES:
        properties = dict(NODES['%s-widget' % node['node_type']].get_fields())

      if node['node_type'] == 'pig':
        properties['script_path'] = node.get('pig').get('script_path')
      elif node['node_type'] == 'spark':
        properties['class'] = node.get('spark').get('class')
        properties['jars'] = node.get('spark').get('jar')
      elif node['node_type'] == 'hive' or node['node_type'] == 'hive2':
        properties['script_path'] = node.get('hive').get('script')
      elif node['node_type'] == 'java':
        properties['main_class'] = node.get('java').get('main-class')
      elif node['node_type'] == 'sqoop':
        properties['command'] = node.get('sqoop').get('command')
      elif node['node_type'] == 'mapreduce':
        properties['job_properties'] = node.get('job_properties')
      elif node['node_type'] == 'shell':
        properties['shell_command'] = node.get('shell').get('command')
      elif node['node_type'] == 'ssh':
        properties['user'] = '%s@%s' % (node.get('ssh').get('user'), node.get('ssh').get('host'))
        properties['ssh_command'] = node.get('ssh').get('command')
      elif node['node_type'] == 'fs':
        fs_props = node.get('fs')
        # TBD: gather props for different fs operations
      elif node['node_type'] == 'email':
        properties['to'] = node.get('email').get('to')
        properties['subject'] = node.get('email').get('subject')
        # TBD: body doesn't show up
        properties['body'] = node.get('email').get('body')
      elif node['node_type'] == 'streaming':
        properties['mapper'] = node.get('streaming').get('mapper')
        properties['reducer'] = node.get('streaming').get('reducer')
      elif node['node_type'] == 'distcp':
        properties['distcp_parameters'] = node.get('params')
      elif node['node_type'] == 'subworkflow':
        properties['app-path'] = node.get('subworkflow').get('app-path')
        properties['workflow'] = node.get('uuid')
        properties['job_properties'] = []
        properties['sla'] = ''

      children = []
      if node['node_type'] in ('fork', 'decision'):
        for key in node.keys():
          if key.startswith('path'):
            children.append({'to': adj_list[node[key]]['uuid'], 'condition': '${ 1 gt 0 }'})
        if node['node_type'] == 'decision':
          children.append({'to': adj_list[node['default']]['uuid'], 'condition': '${ 1 gt 0 }'})
      else:
        if node.get('ok_to'):
          children.append({'to': adj_list[node['ok_to']]['uuid']})
        if node.get('error_to'):
          children.append({'error': adj_list[node['error_to']]['uuid']})

      wf_nodes.append({
        "id": node['uuid'],
        "name": '%s-%s' % (node['node_type'].split('-')[0], node['uuid'][:4]),
        "type": "%s-widget" % node['node_type'],
        "properties": properties,
        "children": children
      })
    else:
      _dig_nodes(node, adj_list, user, wf_nodes)


def _create_workflow_layout(nodes, adj_list, size=12):
  wf_rows = []
  for node in nodes:
    if type(node) == list and len(node) == 1:
      node = node[0]
    if type(node) != list:
      wf_rows.append({"widgets": [{"size": size, "name": adj_list[node]['node_type'], "id": adj_list[node]['uuid'], "widgetType": "%s-widget" % adj_list[node]['node_type'], "properties": {}, "offset": 0, "isLoading": False, "klass": "card card-widget span%s" % size, "columns": []}]})
    else:
      if adj_list[node[0]]['node_type'] in ('fork', 'decision'):
        wf_rows.append({"widgets": [{"size": size, "name": adj_list[node[0]]['name'], "id": adj_list[node[0]]['uuid'], "widgetType": "%s-widget" % adj_list[node[0]]['node_type'], "properties": {}, "offset": 0, "isLoading": False, "klass": "card card-widget span%s" % size, "columns": []}]})

        wf_rows.append({
          "id": str(uuid.uuid4()),
          "widgets": [],
          "columns": [
            {
              "id": str(uuid.uuid4()),
              "size": (size / len(node[1])),
              "rows": [{
                "id": str(uuid.uuid4()),
                "widgets": c['widgets'],
                "columns": c.get('columns') or []
              } for c in col],
              "klass": "card card-home card-column span%s" % (size / len(node[1]))
            }
            for col in [_create_workflow_layout(item, adj_list, size) for item in node[1]]
          ]
        })

        if adj_list[node[0]]['node_type'] == 'fork':
          wf_rows.append({"widgets": [{"size": size, "name": adj_list[node[2]]['name'], "id": adj_list[node[2]]['uuid'], "widgetType": "%s-widget" % adj_list[node[2]]['node_type'], "properties": {}, "offset": 0, "isLoading": False, "klass": "card card-widget span%s" % size, "columns": []}]})
      else:
        wf_rows.append(_create_workflow_layout(node, adj_list, size))
  return wf_rows


def _get_hierarchy_from_adj_list(adj_list, curr_node, node_hierarchy):
  _get_hierarchy_from_adj_list_helper(adj_list, curr_node, node_hierarchy)

  # Add End and Kill nodes to node_hierarchy
  for key in adj_list.keys():
    if adj_list[key]['node_type'] == 'kill':
      node_hierarchy.append([adj_list[key]['name']])
  node_hierarchy.append([adj_list[key]['name'] for key in adj_list.keys() if adj_list[key]['node_type'] == 'end'])


def _get_hierarchy_from_adj_list_helper(adj_list, curr_node, node_hierarchy):
  if not curr_node or adj_list[curr_node]['node_type'] in ('join', 'end', 'kill'):
    return curr_node
  elif adj_list[curr_node]['node_type'] in ('fork', 'decision'):
    branch_nodes = []
    branch_nodes.append(curr_node)
    join_node = None
    children = []

    for key in adj_list[curr_node].keys():
      if key.startswith('path'):
        child = []
        return_node = _get_hierarchy_from_adj_list_helper(adj_list, adj_list[curr_node][key], child)
        join_node = return_node if not join_node else join_node
        if child:
          children.append(child)

    branch_nodes.append(children)

    if adj_list[curr_node]['node_type'] == 'fork':
      branch_nodes.append(join_node)
      node_hierarchy.append(branch_nodes)
      return _get_hierarchy_from_adj_list_helper(adj_list, adj_list[join_node]['ok_to'], node_hierarchy)

    node_hierarchy.append(branch_nodes)
    return join_node
  else:
    node_hierarchy.append(curr_node)
    return _get_hierarchy_from_adj_list_helper(adj_list, adj_list[curr_node]['ok_to'], node_hierarchy)
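
# A sketch of the resulting node_hierarchy shape (inferred from the traversal
# above, not from the original docs): linear nodes are appended by name and
# each fork becomes a nested [head, [branches...], join] list, with the kill
# and end nodes appended last by _get_hierarchy_from_adj_list(), e.g.
#   ['start', 'task-a', ['fork-1', [['task-b'], ['task-c']], 'join-1'], ['kill'], ['end']]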


def _create_graph_adjaceny_list(nodes):
  start_node = [node for node in nodes if node.get('node_type') == 'start'][0]
  adj_list = {'start': start_node}

  for node in nodes:
    if node and node.get('node_type') != 'start':
      adj_list[node['name']] = node

  return adj_list
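
# Sketch of the adjacency list shape: node names map to their parsed
# definitions, and the start node is also reachable under the fixed key
# 'start', e.g.
#   {'start': {...}, 'task-a': {'name': 'task-a', 'node_type': 'pig', 'ok_to': 'end', ...}}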


class Node():

  def __init__(self, data, user=None):
    self.data = data
    self.user = user

    self._augment_data()

  def to_xml(self, mapping=None, node_mapping=None, workflow_mapping=None):
    if mapping is None:
      mapping = {}
    if node_mapping is None:
      node_mapping = {}
    if workflow_mapping is None:
      workflow_mapping = {}

    if self.data['type'] in ('hive2', 'hive-document') and not self.data['properties']['jdbc_url']:
      self.data['properties']['jdbc_url'] = _get_hiveserver2_url()

    if self.data['type'] == 'fork':
      links = [link for link in self.data['children'] if link['to'] in node_mapping]
      if len(links) != len(self.data['children']):
        LOG.warn('Fork has some children links that do not exist, ignoring them: links %s, existing links %s, links %s, existing links %s'
                 % (len(links), len(self.data['children']), links, self.data['children']))
        self.data['children'] = links

    if self.data['type'] == JavaDocumentAction.TYPE:
      notebook = Notebook(document=Document2.objects.get_by_uuid(user=self.user, uuid=self.data['properties']['uuid']))
      properties = notebook.get_data()['snippets'][0]['properties']

      self.data['properties']['main_class'] = properties['class']
      self.data['properties']['app_jar'] = properties['app_jar']  # Not used here
      self.data['properties']['files'] = [{'value': f['path']} for f in properties['files']]
      self.data['properties']['arguments'] = [{'value': prop} for prop in properties['arguments']]

    data = {
      'node': self.data,
      'mapping': mapping,
      'node_mapping': node_mapping,
      'workflow_mapping': workflow_mapping
    }

    return django_mako.render_to_string(self.get_template_name(), data)

  @property
  def id(self):
    return self.data['id']

  @property
  def name(self):
    return self.data['name']

  @property
  def sla_enabled(self):
    return 'sla' in self.data['properties'] and self.data['properties']['sla'] and self.data['properties']['sla'][0].get('value')

  def _augment_data(self):
    self.data['type'] = self.data['type'].replace('-widget', '')
    self.data['uuid'] = self.data['id']

    # Action Node
    if 'credentials' not in self.data['properties']:
      self.data['properties']['credentials'] = []
    if 'prepares' not in self.data['properties']:
      self.data['properties']['prepares'] = []
    if 'job_xml' not in self.data['properties']:
      self.data['properties']['job_xml'] = []
    if 'properties' not in self.data['properties']:
      self.data['properties']['properties'] = []
    if 'params' not in self.data['properties']:
      self.data['properties']['params'] = []
    if 'files' not in self.data['properties']:
      self.data['properties']['files'] = []
    if 'archives' not in self.data['properties']:
      self.data['properties']['archives'] = []
    if 'sla' not in self.data['properties']:
      self.data['properties']['sla'] = WorkflowConfiguration.SLA_DEFAULT
    if 'retry_max' not in self.data['properties']:
      self.data['properties']['retry_max'] = []
    if 'retry_interval' not in self.data['properties']:
      self.data['properties']['retry_interval'] = []

    # Backward compatibility
    _upgrade_older_node(self.data)

  def get_template_name(self):
    node_type = self.data['type']
    if self.data['type'] == JavaDocumentAction.TYPE:
      node_type = JavaAction.TYPE

    return 'editor2/gen/workflow-%s.xml.mako' % node_type

  def find_parameters(self):
    return find_parameters(self) + (find_parameters(self, ['sla']) if self.sla_enabled else [])


def _upgrade_older_node(node):
  if node['type'] in ('sqoop', 'sqoop-widget') and 'arguments' not in node['properties']:
    node['properties']['arguments'] = node['properties']['parameters']

  if node['type'] in ('kill', 'kill-widget') and 'to' not in node['properties']:
    node['properties']['enableMail'] = False
    node['properties']['to'] = ''
    node['properties']['cc'] = ''
    node['properties']['subject'] = ''
    node['properties']['body'] = ''

  if node['type'] == 'email-widget' and 'bcc' not in node['properties']:
    node['properties']['bcc'] = ''
    node['properties']['content_type'] = 'text/plain'
    node['properties']['attachment'] = ''

  if node['type'] == 'spark-widget' and 'files' not in node['properties']:
    node['properties']['files'] = []


class Action(object):

  @classmethod
  def get_fields(cls):
    credentials = [cls.DEFAULT_CREDENTIALS] if hasattr(cls, 'DEFAULT_CREDENTIALS') else []
    return [(f['name'], f['value']) for f in cls.FIELDS.itervalues()] + [('sla', WorkflowConfiguration.SLA_DEFAULT), ('credentials', credentials)]
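
  # Sketch: get_fields() flattens FIELDS into (name, default value) pairs and
  # appends the two implicit entries; for a class with empty FIELDS, such as
  # StartNode below:
  #   StartNode.get_fields() -> [('sla', WorkflowConfiguration.SLA_DEFAULT), ('credentials', [])]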


class StartNode(Action):
  TYPE = 'start'
  FIELDS = {}


class EndNode(Action):
  TYPE = 'end'
  FIELDS = {}


class PigAction(Action):
  TYPE = 'pig'
  FIELDS = {
    'script_path': {
      'name': 'script_path',
      'label': _('Script'),
      'value': '',
      'help_text': _('Path to the script on HDFS.'),
      'type': ''
    },
    'parameters': {
      'name': 'parameters',
      'label': _('Parameters'),
      'value': [],
      'help_text': _('The Pig parameters of the script without -param. e.g. INPUT=${inputDir}'),
      'type': ''
    },
    'arguments': {
      'name': 'arguments',
      'label': _('Arguments'),
      'value': [],
      'help_text': _('The Pig parameters of the script as is. e.g. -param, INPUT=${inputDir}'),
      'type': ''
    },
    # Common
    'files': {
      'name': 'files',
      'label': _('Files'),
      'value': [],
      'help_text': _('Files put in the running directory.'),
      'type': ''
    },
    'archives': {
      'name': 'archives',
      'label': _('Archives'),
      'value': [],
      'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
      'type': ''
    },
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('value, e.g. production'),
      'type': ''
    },
    'prepares': {
      'name': 'prepares',
      'label': _('Prepares'),
      'value': [],
      'help_text': _('Path to manipulate before starting the application.'),
      'type': ''
    },
    'job_xml': {
      'name': 'job_xml',
      'label': _('Job XML'),
      'value': [],
      'help_text': _('Refer to a Hadoop JobConf job.xml'),
      'type': ''
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['script_path']]


class JavaAction(Action):
  TYPE = 'java'
  FIELDS = {
    'jar_path': {
      'name': 'jar_path',
      'label': _('Jar name'),
      'value': '',
      'help_text': _('Path to the jar on HDFS.'),
      'type': ''
    },
    'main_class': {
      'name': 'main_class',
      'label': _('Main class'),
      'value': '',
      'help_text': _('Java class. e.g. org.apache.hadoop.examples.Grep'),
      'type': 'text'
    },
    'arguments': {
      'name': 'arguments',
      'label': _('Arguments'),
      'value': [],
      'help_text': _('Arguments of the main method. The value of each arg element is considered a single argument '
                     'and they are passed to the main method in the same order.'),
      'type': ''
    },
    'java_opts': {
      'name': 'java_opts',
      'label': _('Java options'),
      'value': [],
      'help_text': _('Parameters for the JVM, e.g. -Dprop1=a -Dprop2=b'),
      'type': ''
    },
    'capture_output': {
      'name': 'capture_output',
      'label': _('Capture output'),
      'value': False,
      'help_text': _('Capture output of the stdout of the %(program)s command execution. The %(program)s '
                     'command output must be in Java Properties file format and it must not exceed 2KB. '
                     'From within the workflow definition, the output of an %(program)s action node is accessible '
                     'via the String action:output(String node, String key) function') % {'program': TYPE.title()},
      'type': ''
    },
    # Common
    'files': {
      'name': 'files',
      'label': _('Files'),
      'value': [],
      'help_text': _('Files put in the running directory.'),
      'type': ''
    },
    'archives': {
      'name': 'archives',
      'label': _('Archives'),
      'value': [],
      'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
      'type': ''
    },
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('value, e.g. production'),
      'type': ''
    },
    'prepares': {
      'name': 'prepares',
      'label': _('Prepares'),
      'value': [],
      'help_text': _('Path to manipulate before starting the application.'),
      'type': ''
    },
    'job_xml': {
      'name': 'job_xml',
      'label': _('Job XML'),
      'value': [],
      'help_text': _('Refer to a Hadoop JobConf job.xml'),
      'type': ''
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['jar_path'], cls.FIELDS['main_class']]


class HiveAction(Action):
  TYPE = 'hive'
  DEFAULT_CREDENTIALS = 'hcat'
  FIELDS = {
    'script_path': {
      'name': 'script_path',
      'label': _('Script'),
      'value': '',
      'help_text': _('Path to the script on HDFS.'),
      'type': ''
    },
    'parameters': {
      'name': 'parameters',
      'label': _('Parameters'),
      'value': [],
      'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
      'type': ''
    },
    # Common
    'files': {
      'name': 'files',
      'label': _('Files'),
      'value': [],
      'help_text': _('Files put in the running directory.'),
      'type': ''
    },
    'archives': {
      'name': 'archives',
      'label': _('Archives'),
      'value': [],
      'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
      'type': ''
    },
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('value, e.g. production'),
      'type': ''
    },
    'prepares': {
      'name': 'prepares',
      'label': _('Prepares'),
      'value': [],
      'help_text': _('Path to manipulate before starting the application.'),
      'type': ''
    },
    'hive_xml': {
      'name': 'hive_xml',
      'label': _('Hive XML'),
      'value': '',
      'help_text': _('Refer to a hive-site.xml for connecting to Hive'),
      'type': ''
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['script_path'], cls.FIELDS['hive_xml']]


def _get_hiveserver2_url():
  try:
    from beeswax.hive_site import hiveserver2_jdbc_url
    return hiveserver2_jdbc_url()
  except Exception, e:
    # Might fail if Hive is disabled
    LOG.warn('Could not guess HiveServer2 URL: %s' % smart_str(e))
    return 'jdbc:hive2://localhost:10000/default'


class HiveServer2Action(Action):
  TYPE = 'hive2'
  DEFAULT_CREDENTIALS = 'hive2'
  FIELDS = {
    'script_path': {
      'name': 'script_path',
      'label': _('Script'),
      'value': '',
      'help_text': _('Path to the script on HDFS.'),
      'type': ''
    },
    'parameters': {
      'name': 'parameters',
      'label': _('Parameters'),
      'value': [],
      'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
      'type': ''
    },
    # Common
    'jdbc_url': {
      'name': 'jdbc_url',
      'label': _('HiveServer2 URL'),
      'value': '',
      'help_text': _('e.g. jdbc:hive2://localhost:10000/default. JDBC URL for the Hive Server 2.'),
      'type': ''
    },
    'password': {
      'name': 'password',
      'label': _('Password'),
      'value': '',
      'help_text': _('The password element must contain the password of the current user. However, the password is only used if Hive Server 2 is backed by '
                     'something requiring a password (e.g. LDAP); non-secured Hive Server 2 or Kerberized Hive Server 2 don\'t require a password.'),
      'type': ''
    },
    'files': {
      'name': 'files',
      'label': _('Files'),
      'value': [],
      'help_text': _('Files put in the running directory.'),
      'type': ''
    },
    'archives': {
      'name': 'archives',
      'label': _('Archives'),
      'value': [],
      'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
      'type': ''
    },
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('value, e.g. production'),
      'type': ''
    },
    'prepares': {
      'name': 'prepares',
      'label': _('Prepares'),
      'value': [],
      'help_text': _('Path to manipulate before starting the application.'),
      'type': ''
    },
    'job_xml': {
      'name': 'job_xml',
      'label': _('Job XML'),
      'value': '',
      'help_text': _('Refer to a Hadoop JobConf job.xml'),
      'type': ''
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['script_path']]


class SubWorkflowAction(Action):
  TYPE = 'subworkflow'
  FIELDS = {
    'workflow': {
      'name': 'workflow',
      'label': _('Sub-workflow'),
      'value': None,
      'help_text': _('The sub-workflow application to include. You must own all the sub-workflows.'),
      'type': 'workflow'
    },
    'propagate_configuration': {
      'name': 'propagate_configuration',
      'label': _('Propagate configuration'),
      'value': True,
      'help_text': _('Whether the workflow job configuration should be propagated to the child workflow.'),
      'type': ''
    },
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('Can be used to specify the job properties that are required to run the child workflow job.'),
      'type': ''
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['workflow']]


class SqoopAction(Action):
  TYPE = 'sqoop'
  FIELDS = {
    'command': {
      'name': 'command',
      'label': _('Sqoop command'),
      'value': 'import --connect jdbc:hsqldb:file:db.hsqldb --table TT --target-dir hdfs://localhost:8020/user/foo -m 1',
      'help_text': _('The full %(type)s command. Either put it here or split it by spaces and insert the parts as multiple parameters below.') % {'type': TYPE},
      'type': 'textarea'
    },
    'arguments': {
      'name': 'arguments',
      'label': _('Arguments'),
      'value': [],
      'help_text': _('If no command is specified, split the command by spaces and insert the %(type)s parameters '
                     'here e.g. import, --connect, jdbc:hsqldb:file:db.hsqldb, ...') % {'type': TYPE},
      'type': ''
    },
    # Common
    'files': {
      'name': 'files',
      'label': _('Files'),
      'value': [],
      'help_text': _('Files put in the running directory.'),
      'type': ''
    },
    'archives': {
      'name': 'archives',
      'label': _('Archives'),
      'value': [],
      'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
      'type': ''
    },
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('value, e.g. production'),
      'type': ''
    },
    'prepares': {
      'name': 'prepares',
      'label': _('Prepares'),
      'value': [],
      'help_text': _('Path to manipulate before starting the application.'),
      'type': ''
    },
    'job_xml': {
      'name': 'job_xml',
      'label': _('Job XML'),
      'value': '',
      'help_text': _('Refer to a Hadoop JobConf job.xml'),
      'type': ''
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['command']]


class MapReduceAction(Action):
  TYPE = 'mapreduce'
  FIELDS = {
    'jar_path': {
      'name': 'jar_path',
      'label': _('Jar name'),
      'value': '',
      'help_text': _('Path to the jar on HDFS.'),
      'type': ''
    },
    # Common
    'files': {
      'name': 'files',
      'label': _('Files'),
      'value': [],
      'help_text': _('Files put in the running directory.'),
      'type': ''
    },
    'archives': {
      'name': 'archives',
      'label': _('Archives'),
      'value': [],
      'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
      'type': ''
    },
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('value, e.g. production'),
      'type': ''
    },
    'prepares': {
      'name': 'prepares',
      'label': _('Prepares'),
      'value': [],
      'help_text': _('Path to manipulate before starting the application.'),
      'type': ''
    },
    'job_xml': {
      'name': 'job_xml',
      'label': _('Job XML'),
      'value': '',
      'help_text': _('Refer to a Hadoop JobConf job.xml'),
      'type': ''
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['jar_path']]


class ShellAction(Action):
  TYPE = 'shell'
  FIELDS = {
    'shell_command': {
      'name': 'shell_command',
      'label': _('Shell command'),
      'value': '',
      'help_text': _('Shell command to execute, e.g. script.sh'),
      'type': ''
    },
    'arguments': {
      'name': 'arguments',
      'label': _('Arguments'),
      'value': [],
      'help_text': _('One arg, e.g. -l, --help'),
      'type': ''
    },
    'env_var': {
      'name': 'env_var',
      'label': _('Environment variables'),
      'value': [],
      'help_text': _('e.g. MAX=10 or PATH=$PATH:mypath'),
      'type': ''
    },
    'capture_output': {
      'name': 'capture_output',
      'label': _('Capture output'),
      'value': True,
      'help_text': _('Capture output of the stdout of the %(program)s command execution. The %(program)s '
                     'command output must be in Java Properties file format and it must not exceed 2KB. '
                     'From within the workflow definition, the output of an %(program)s action node is accessible '
                     'via the String action:output(String node, String key) function') % {'program': TYPE},
      'type': ''
    },
    # Common
    'files': {
      'name': 'files',
      'label': _('Files'),
      'value': [],
      'help_text': _('Files put in the running directory.'),
      'type': ''
    },
    'archives': {
      'name': 'archives',
      'label': _('Archives'),
      'value': [],
      'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
      'type': ''
    },
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('value, e.g. production'),
      'type': ''
    },
    'prepares': {
      'name': 'prepares',
      'label': _('Prepares'),
      'value': [],
      'help_text': _('Path to manipulate before starting the application.'),
      'type': ''
    },
    'job_xml': {
      'name': 'job_xml',
      'label': _('Job XML'),
      'value': '',
      'help_text': _('Refer to a Hadoop JobConf job.xml'),
      'type': ''
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['shell_command']]


class SshAction(Action):
  TYPE = 'ssh'
  FIELDS = {
    'host': {
      'name': 'host',
      'label': _('User and Host'),
      'value': 'user@host.com',
      'help_text': _('Where the shell will be executed.'),
      'type': 'text'
    },
    'ssh_command': {
      'name': 'ssh_command',
      'label': _('Ssh command'),
      'value': 'ls',
      'help_text': _('The path of the Shell command to execute.'),
      'type': 'textarea'
    },
    'arguments': {
      'name': 'arguments',
      'label': _('Arguments'),
      'value': [],
      'help_text': _('One arg, e.g. -l, --help'),
      'type': ''
    },
    'capture_output': {
      'name': 'capture_output',
      'label': _('Capture output'),
      'value': True,
      'help_text': _('Capture output of the stdout of the %(program)s command execution. The %(program)s '
                     'command output must be in Java Properties file format and it must not exceed 2KB. '
                     'From within the workflow definition, the output of an %(program)s action node is accessible '
                     'via the String action:output(String node, String key) function') % {'program': TYPE},
      'type': ''
    },
    # Common
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['host'], cls.FIELDS['ssh_command']]


class FsAction(Action):
  TYPE = 'fs'
  FIELDS = {
    'deletes': {
      'name': 'deletes',
      'label': _('Delete path'),
      'value': [],
      'help_text': _('Deletes recursively all content.'),
      'type': ''
    },
    'mkdirs': {
      'name': 'mkdirs',
      'label': _('Create directory'),
      'value': [],
      'help_text': _('Sub directories are created if needed.'),
      'type': ''
    },
    'moves': {
      'name': 'moves',
      'label': _('Move file or directory'),
      'value': [],
      'help_text': _('Destination.'),
      'type': ''
    },
    'chmods': {
      'name': 'chmods',
      'label': _('Change permissions'),
      'value': [],
      'help_text': _('File or directory.'),
      'type': ''
    },
    'touchzs': {
      'name': 'touchzs',
      'label': _('Create or touch a file'),
      'value': [],
      'help_text': _('Or update its modification date.'),
      'type': ''
    },
    'chgrps': {
      'name': 'chgrps',
      'label': _('Change the group'),
      'value': [],
      'help_text': _('File or directory.'),
      'type': ''
    },
    # Common
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return []


class EmailAction(Action):
  TYPE = 'email'
  FIELDS = {
    'to': {
      'name': 'to',
      'label': _('To addresses'),
      'value': '',
      'help_text': _('Comma-separated values'),
      'type': 'text'
    },
    'cc': {
      'name': 'cc',
      'label': _('cc'),
      'value': '',
      'help_text': _('Comma-separated values'),
      'type': 'text'
    },
    'bcc': {
      'name': 'bcc',
      'label': _('bcc'),
      'value': '',
      'help_text': _('Comma-separated values'),
      'type': 'text'
    },
    'subject': {
      'name': 'subject',
      'label': _('Subject'),
      'value': '',
      'help_text': _('Plain-text'),
      'type': 'text'
    },
    'body': {
      'name': 'body',
      'label': _('Body'),
      'value': '',
      'help_text': _('Plain-text'),
      'type': 'textarea'
    },
    'attachment': {
      'name': 'attachment',
      'label': _('Attachment'),
      'value': '',
      'help_text': _('Comma separated list of HDFS files.'),
      'type': ''
    },
    'content_type': {
      'name': 'content_type',
      'label': _('Content-type'),
      'value': 'text/plain',
      'help_text': _('Default is text/plain'),
      'type': 'text'
    },
    # Common
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['to'], cls.FIELDS['subject'], cls.FIELDS['body']]


class StreamingAction(Action):
  TYPE = 'streaming'
  FIELDS = {
    'mapper': {
      'name': 'mapper',
      'label': _('Mapper'),
      'value': '',
      'help_text': _('The executable/script to be used as mapper.'),
      'type': ''
    },
    'reducer': {
      'name': 'reducer',
      'label': _('Reducer'),
      'value': '',
      'help_text': _('The executable/script to be used as reducer.'),
      'type': ''
    },
    # Common
    'files': {
      'name': 'files',
      'label': _('Files'),
      'value': [],
      'help_text': _('Files put in the running directory.')
    },
    'archives': {
      'name': 'archives',
      'label': _('Archives'),
      'value': [],
      'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.')
    },
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('value, e.g. production')
    },
    'prepares': {
      'name': 'prepares',
      'label': _('Prepares'),
      'value': [],
      'help_text': _('Path to manipulate before starting the application.')
    },
    'job_xml': {
      'name': 'job_xml',
      'label': _('Job XML'),
      'value': '',
      'help_text': _('Refer to a Hadoop JobConf job.xml')
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['mapper'], cls.FIELDS['reducer']]


class DistCpAction(Action):
  TYPE = 'distcp'
  FIELDS = {
    'distcp_parameters': {
      'name': 'distcp_parameters',
      'label': _('Arguments'),
      'value': [{'value': ''}, {'value': ''}],
      'help_text': _('Options first, then source / destination paths'),
      'type': 'distcp'
    },
    # Common
    'prepares': {
      'name': 'prepares',
      'label': _('Prepares'),
      'value': [],
      'help_text': _('Path to manipulate before starting the application.')
    },
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('value, e.g. production')
    },
    'java_opts': {
      'name': 'java_opts',
      'label': _('Java options'),
      'value': '',
      'help_text': _('Parameters for the JVM, e.g. -Dprop1=a -Dprop2=b')
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['distcp_parameters']]


class SparkAction(Action):
  TYPE = 'spark'
  FIELDS = {
    'spark_master': {
      'name': 'spark_master',
      'label': _('Spark Master'),
      'value': 'local[*]',
      'help_text': _('Ex: spark://host:port, mesos://host:port, yarn, or local.'),
      'type': ''
    },
    'mode': {
      'name': 'mode',
      'label': _('Mode'),
      'value': 'client',
      'help_text': _('e.g. client,cluster'),
      'type': ''
    },
    'app_name': {
      'name': 'app_name',
      'label': _('App name'),
      'value': 'MySpark',
      'help_text': _('The name of the spark application'),
      'type': ''
    },
    'files': {
      'name': 'files',
      'label': _('Files'),
      'value': [],
      'help_text': _('Files put in the running directory.'),
      'type': ''
    },
    'class': {
      'name': 'class',
      'label': _('Main class'),
      'value': '',
      'help_text': _("e.g. org.apache.spark.examples.mllib.JavaALS."),
      'type': 'text'
    },
    'jars': {
      'name': 'jars',
      'label': _('Jars/py files'),
      'value': '',
      'help_text': _('Comma separated list of jars or python HDFS files.'),
      'type': ''
    },
    'spark_opts': {
      'name': 'spark_opts',
      'label': _('Options list'),
      'value': '',
      'help_text': _('Ex: --executor-memory 20G --num-executors 50'),
      'type': ''
    },
    'spark_arguments': {
      'name': 'spark_arguments',
      'label': _('Arguments'),
      'value': [],
      'help_text': _('Arguments, one by one, e.g. 1000, /path/a.')
    },
    # Common
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('value, e.g. production')
    },
    'prepares': {
      'name': 'prepares',
      'label': _('Prepares'),
      'value': [],
      'help_text': _('Path to manipulate before starting the application.')
    },
    'job_xml': {
      'name': 'job_xml',
      'label': _('Job XML'),
      'value': '',
      'help_text': _('Refer to a Hadoop JobConf job.xml'),
      'type': ''
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['spark_master'], cls.FIELDS['mode'], cls.FIELDS['jars']]


class KillAction(Action):
  TYPE = 'kill'
  FIELDS = {
    'message': {
      'name': 'message',
      'label': _('Message'),
      'value': _('Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]'),
      'help_text': _('Message to display when the workflow fails. Can contain some EL functions.'),
      'type': 'textarea'
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['message']]


class JoinAction(Action):
  TYPE = 'join'
  FIELDS = {}

  @classmethod
  def get_mandatory_fields(cls):
    return []


class GenericAction(Action):
  TYPE = 'generic'
  FIELDS = {
    'xml': {
      'name': 'xml',
      'label': _('XML of the action'),
      'value': '<my_action>\n</my_action>',
      'help_text': _('Insert verbatim the XML of the action to insert into the workflow.'),
      'type': 'textarea'
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['xml']]


class ForkNode(Action):
  TYPE = 'fork'
  FIELDS = {}

  @classmethod
  def get_mandatory_fields(cls):
    return []


class HiveDocumentAction(Action):
  TYPE = 'hive-document'
  DEFAULT_CREDENTIALS = 'hive2'
  FIELDS = {
    'uuid': {
      'name': 'uuid',
      'label': _('Hive query'),
      'value': '',
      'help_text': _('Select a saved Hive query you want to schedule.'),
      'type': 'hive'
    },
    'parameters': {
      'name': 'parameters',
      'label': _('Parameters'),
      'value': [],
      'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
      'type': ''
    },
    # Common
    'jdbc_url': {
      'name': 'jdbc_url',
      'label': _('HiveServer2 URL'),
      'value': '',
      'help_text': _('e.g. jdbc:hive2://localhost:10000/default. JDBC URL for the Hive Server 2.'),
      'type': ''
    },
    'password': {
      'name': 'password',
      'label': _('Password'),
      'value': '',
      'help_text': _('The password element must contain the password of the current user. However, the password is only used if Hive Server 2 is backed by '
                     'something requiring a password (e.g. LDAP); non-secured Hive Server 2 or Kerberized Hive Server 2 don\'t require a password.'),
      'type': ''
    },
    'files': {
      'name': 'files',
      'label': _('Files'),
      'value': [],
      'help_text': _('Files put in the running directory.'),
      'type': ''
    },
    'archives': {
      'name': 'archives',
      'label': _('Archives'),
      'value': [],
      'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
      'type': ''
    },
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('value, e.g. production'),
      'type': ''
    },
    'prepares': {
      'name': 'prepares',
      'label': _('Prepares'),
      'value': [],
      'help_text': _('Path to manipulate before starting the application.'),
      'type': ''
    },
    'job_xml': {
      'name': 'job_xml',
      'label': _('Job XML'),
      'value': '',
      'help_text': _('Refer to a Hadoop JobConf job.xml'),
      'type': ''
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['uuid']]


class JavaDocumentAction(Action):
  TYPE = 'java-document'
  FIELDS = {
    'uuid': {
      'name': 'uuid',
      'label': _('Java program'),
      'value': '',
      'help_text': _('Select a saved Java program you want to schedule.'),
      'type': 'java'
    },
    'arguments': {
      'name': 'arguments',
      'label': _('Arguments'),
      'value': [],
      'help_text': _('Arguments of the main method. The value of each arg element is considered a single argument '
                     'and they are passed to the main method in the same order.'),
      'type': ''
    },
    'java_opts': {
      'name': 'java_opts',
      'label': _('Java options'),
      'value': [],
      'help_text': _('Parameters for the JVM, e.g. -Dprop1=a -Dprop2=b'),
      'type': ''
    },
    'capture_output': {
      'name': 'capture_output',
      'label': _('Capture output'),
      'value': False,
      'help_text': _('Capture output of the stdout of the %(program)s command execution. The %(program)s '
                     'command output must be in Java Properties file format and it must not exceed 2KB. '
                     'From within the workflow definition, the output of an %(program)s action node is accessible '
                     'via the String action:output(String node, String key) function') % {'program': TYPE.title()},
      'type': ''
    },
    # Common
    'files': {
      'name': 'files',
      'label': _('Files'),
      'value': [],
      'help_text': _('Files put in the running directory.'),
      'type': ''
    },
    'archives': {
      'name': 'archives',
      'label': _('Archives'),
      'value': [],
      'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
      'type': ''
    },
    'job_properties': {
      'name': 'job_properties',
      'label': _('Hadoop job properties'),
      'value': [],
      'help_text': _('value, e.g. production'),
      'type': ''
    },
    'prepares': {
      'name': 'prepares',
      'label': _('Prepares'),
      'value': [],
      'help_text': _('Path to manipulate before starting the application.'),
      'type': ''
    },
    'job_xml': {
      'name': 'job_xml',
      'label': _('Job XML'),
      'value': [],
      'help_text': _('Refer to a Hadoop JobConf job.xml'),
      'type': ''
    },
    'retry_max': {
      'name': 'retry_max',
      'label': _('Max retry'),
      'value': [],
      'help_text': _('Number of times, default is 3'),
      'type': ''
    },
    'retry_interval': {
      'name': 'retry_interval',
      'label': _('Retry interval'),
      'value': [],
      'help_text': _('Wait time in minutes, default is 10'),
      'type': ''
    }
  }

  @classmethod
  def get_mandatory_fields(cls):
    return [cls.FIELDS['uuid']]


class DecisionNode(Action):
  TYPE = 'decision'
  FIELDS = {}

  @classmethod
  def get_mandatory_fields(cls):
    return []


NODES = {
  'start-widget': StartNode,
  'end-widget': EndNode,
  'pig-widget': PigAction,
  'java-widget': JavaAction,
  'hive-widget': HiveAction,
  'hive2-widget': HiveServer2Action,
  'sqoop-widget': SqoopAction,
  'mapreduce-widget': MapReduceAction,
  'subworkflow-widget': SubWorkflowAction,
  'shell-widget': ShellAction,
  'ssh-widget': SshAction,
  'fs-widget': FsAction,
  'email-widget': EmailAction,
  'streaming-widget': StreamingAction,
  'distcp-widget': DistCpAction,
  'kill-widget': KillAction,
  'join-widget': JoinAction,
  'fork-widget': ForkNode,
  'decision-widget': DecisionNode,
  'spark-widget': SparkAction,
  'generic-widget': GenericAction,
  'hive-document-widget': HiveDocumentAction,
  'java-document-widget': JavaDocumentAction
}


WORKFLOW_NODE_PROPERTIES = {}
for node in NODES.itervalues():
  WORKFLOW_NODE_PROPERTIES.update(node.FIELDS)
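
# Illustrative sketch (not part of the original module): the NODES registry maps
# the widget type persisted in workflow JSON back to its Action class, and
# WORKFLOW_NODE_PROPERTIES merges every action's FIELDS, so later classes win on
# shared names such as 'retry_max'. Assuming Action.get_fields() returns
# (name, default) pairs, as used in _dig_nodes() further down:
#
#   action_class = NODES['shell-widget']         # -> ShellAction
#   defaults = dict(action_class.get_fields())   # field name -> default value
#   action_class.get_mandatory_fields()          # [FIELDS['shell_command']]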


def find_parameters(instance, fields=None):
  """Find parameters in the given fields"""
  if fields is None:
    fields = NODES['%s-widget' % instance.data['type']].FIELDS.keys()

  params = []

  for field in fields:
    data = instance.data['properties'][field]

    if field == 'sla' and not instance.sla_enabled:
      continue

    if isinstance(data, list):
      params.extend(find_json_parameters(data))
    elif isinstance(data, basestring):
      for match in Template.pattern.finditer(data):
        name = match.group('braced')
        if name is not None:
          params.append(name)

  return params


def find_json_parameters(fields):
  # Input is a list of JSON dicts
  params = []

  for field in fields:
    for data in field.values():
      if isinstance(data, basestring):
        for match in Template.pattern.finditer(data):
          name = match.group('braced')
          if name is not None:
            params.append(name)

  return params
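
# Illustrative sketch (not part of the original module): find_json_parameters()
# scans the string values of a list of JSON dicts for string.Template-style
# ${...} references, e.g.:
#
#   find_json_parameters([{'value': 'INPUT=${inputDir}'}, {'value': 'N=5'}])
#   # -> ['inputDir']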


def find_dollar_variables(text):
  return re.findall('[^\n\\\\]\$([^\{ \'\"\-;\(\)]+)', text, re.MULTILINE)


def find_dollar_braced_variables(text):
  vars = set()

  for var in re.findall('\$\{([A-Za-z0-9:_-]+)\}', text, re.MULTILINE):
    if ':' in var:
      var = var.split(':', 1)[1]
    vars.add(var)

  return list(vars)
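
# Illustrative sketch: find_dollar_braced_variables() strips a leading namespace
# (anything before ':'), so EL-style references collapse to their bare name:
#
#   find_dollar_braced_variables('${wf:user} runs at ${start_date}')
#   # -> ['user', 'start_date'] (set-derived, so order is not guaranteed)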


def import_workflow_from_hue_3_7(old_wf):
  """
  Example of data to transform

  [<Start: start>, <Pig: Pig>, [<Kill: kill>], [<End: end>]]
  [<Start: start>, <Java: TeraGenWorkflow>, <Java: TeraSort>, [<Kill: kill>], [<End: end>]]
  [<Start: start>, [<Fork: fork-34>, [[<Mapreduce: Sleep-1>, <Mapreduce: Sleep-10>], [<Mapreduce: Sleep-5>, [<Fork: fork-38>, [[<Mapreduce: Sleep-3>], [<Mapreduce: Sleep-4>]], <Join: join-39>]]], <Join: join-35>], [<Kill: kill>], [<End: end>]]
  """
  uuids = {}

  old_nodes = old_wf.get_hierarchy()

  wf = Workflow()
  wf_rows = []
  wf_nodes = []
  data = wf.get_data()

  # UUIDs node mapping
  for node in old_wf.node_list:
    if node.name == 'kill':
      node_uuid = '17c9c895-5a16-7443-bb81-f34b30b21548'
    elif node.name == 'start':
      node_uuid = '3f107997-04cc-8733-60a9-a4bb62cebffc'
    elif node.name == 'end':
      node_uuid = '33430f0f-ebfa-c3ec-f237-3e77efa03d0a'
    else:
      node_uuid = str(uuid.uuid4())

    uuids[node.id] = node_uuid

  # Workflow
  data['workflow']['uuid'] = str(uuid.uuid4())
  data['workflow']['name'] = old_wf.name
  data['workflow']['properties']['properties'] = json.loads(old_wf.job_properties)
  data['workflow']['properties']['job_xml'] = old_wf.job_xml
  data['workflow']['properties']['description'] = old_wf.description
  data['workflow']['properties']['schema_version'] = old_wf.schema_version
  data['workflow']['properties']['deployment_dir'] = old_wf.deployment_dir
  data['workflow']['properties']['parameters'] = json.loads(old_wf.parameters)
  data['workflow']['properties']['sla'] = old_wf.sla
  data['workflow']['properties']['sla_enabled'] = old_wf.sla_enabled
  data['workflow']['properties']['imported'] = True
  data['workflow']['properties']['wf1_id'] = old_wf.id

  # Layout
  rows = data['layout'][0]['rows']

  def _create_layout(nodes, size=12):
    wf_rows = []

    for node in nodes:
      if type(node) == list and len(node) == 1:
        node = node[0]

      if type(node) != list:
        wf_rows.append({
          "widgets": [{
            "size": size, "name": node.name.title(), "id": uuids[node.id], "widgetType": "%s-widget" % node.node_type,
            "properties": {}, "offset": 0, "isLoading": False, "klass": "card card-widget span%s" % size, "columns": []
          }]
        })
      else:
        if node[0].node_type == 'fork':
          wf_rows.append({
            "widgets": [{
              "size": size, "name": 'Fork', "id": uuids[node[0].id], "widgetType": "%s-widget" % node[0].node_type,
              "properties": {}, "offset": 0, "isLoading": False, "klass": "card card-widget span%s" % size, "columns": []
            }]
          })

          wf_rows.append({
            "id": str(uuid.uuid4()),
            "widgets": [],
            "columns": [
              {
                "id": str(uuid.uuid4()),
                "size": (size / len(node[1])),
                "rows": [{
                    "id": str(uuid.uuid4()),
                    "widgets": c['widgets'],
                    "columns": []
                  } for c in col] if type(col) == list else [{
                    "id": str(uuid.uuid4()),
                    "widgets": col['widgets'],
                    "columns": []
                  }],
                "klass": "card card-home card-column span%s" % (size / len(node[1]))
              }
              for col in _create_layout(node[1], size)
            ]
          })

          wf_rows.append({
            "widgets": [{
              "size": size, "name": 'Join', "id": uuids[node[2].id], "widgetType": "%s-widget" % node[2].node_type,
              "properties": {}, "offset": 0, "isLoading": False, "klass": "card card-widget span%s" % size, "columns": []
            }]
          })
        else:
          wf_rows.append(_create_layout(node, size))

    return wf_rows

  wf_rows = _create_layout(old_nodes)

  if wf_rows:
    data['layout'][0]['rows'] = [data['layout'][0]['rows'][0]] + wf_rows + [data['layout'][0]['rows'][-1]]

  # Content
  def _dig_nodes(nodes):
    for node in nodes:
      if type(node) != list:
        properties = {}
        if '%s-widget' % node.node_type in NODES:
          properties = dict(NODES['%s-widget' % node.node_type].get_fields())

        if node.node_type == 'pig':
          properties['script_path'] = node.script_path
          properties['parameters'] = [param for param in json.loads(node.params) if param['value'] != '-param']
          properties['files'] = [{'value': f} for f in json.loads(node.files)]
          properties['archives'] = json.loads(node.archives)
          properties['job_properties'] = json.loads(node.job_properties)
          properties['prepares'] = json.loads(node.prepares)
          properties['job_xml'] = node.job_xml
          properties['description'] = node.description
          properties['sla'] = node.sla
          properties['sla_enabled'] = node.sla_enabled
        elif node.node_type == 'hive':
          properties['script_path'] = node.script_path
          properties['parameters'] = [param for param in json.loads(node.params) if param['value'] != '-param']
          properties['files'] = [{'value': f} for f in json.loads(node.files)]
          properties['archives'] = json.loads(node.archives)
          properties['job_properties'] = json.loads(node.job_properties)
          properties['prepares'] = json.loads(node.prepares)
          properties['hive_xml'] = node.job_xml
          properties['description'] = node.description
          properties['sla'] = node.sla
          properties['sla_enabled'] = node.sla_enabled
        elif node.node_type == 'java':
          properties['jar_path'] = node.jar_path
          properties['main_class'] = node.main_class
          properties['arguments'] = [{'value': arg} for arg in node.args.split(' ')]
          properties['java_opts'] = node.java_opts
          properties['capture_output'] = node.capture_output
          properties['files'] = [{'value': f} for f in json.loads(node.files)]
          properties['archives'] = json.loads(node.archives)
          properties['job_properties'] = json.loads(node.job_properties)
          properties['prepares'] = json.loads(node.prepares)
          properties['job_xml'] = node.job_xml
          properties['description'] = node.description
          properties['sla'] = node.sla
          properties['sla_enabled'] = node.sla_enabled
        elif node.node_type == 'sqoop':
          properties['command'] = node.script_path
          properties['parameters'] = json.loads(node.params)
          properties['files'] = [{'value': f} for f in json.loads(node.files)]
          properties['archives'] = json.loads(node.archives)
          properties['job_properties'] = json.loads(node.job_properties)
          properties['prepares'] = json.loads(node.prepares)
          properties['job_xml'] = node.job_xml
          properties['description'] = node.description
          properties['sla'] = node.sla
          properties['sla_enabled'] = node.sla_enabled
        elif node.node_type == 'mapreduce':
          properties['jar_path'] = node.jar_path
          properties['files'] = [{'value': f} for f in json.loads(node.files)]
          properties['archives'] = json.loads(node.archives)
          properties['job_properties'] = json.loads(node.job_properties)
          properties['prepares'] = json.loads(node.prepares)
          properties['job_xml'] = node.job_xml
          properties['description'] = node.description
          properties['sla'] = node.sla
          properties['sla_enabled'] = node.sla_enabled
        elif node.node_type == 'shell':
          properties['shell_command'] = node.command
          properties['arguments'] = json.loads(node.params)
          properties['capture_output'] = node.capture_output
          properties['files'] = [{'value': f} for f in json.loads(node.files)]
          properties['archives'] = json.loads(node.archives)
          properties['job_properties'] = json.loads(node.job_properties)
          properties['prepares'] = json.loads(node.prepares)
          properties['job_xml'] = node.job_xml
          properties['description'] = node.description
          properties['sla'] = node.sla
          properties['sla_enabled'] = node.sla_enabled
        elif node.node_type == 'ssh':
          properties['host'] = '%s@%s' % (node.user, node.host)  # maps onto the 'host' field of SshAction
          properties['ssh_command'] = node.command
          properties['arguments'] = json.loads(node.params)
          properties['capture_output'] = node.capture_output
          properties['description'] = node.description
          properties['sla'] = node.sla
          properties['sla_enabled'] = node.sla_enabled
        elif node.node_type == 'fs':
          properties['deletes'] = [{'value': f['name']} for f in json.loads(node.deletes)]
          properties['mkdirs'] = [{'value': f['name']} for f in json.loads(node.mkdirs)]
          properties['moves'] = json.loads(node.moves)
          chmods = json.loads(node.chmods)
          for c in chmods:
            c['value'] = c['path']
            c['dir_files'] = False
          properties['chmods'] = chmods
          properties['touchzs'] = [{'value': f['name']} for f in json.loads(node.touchzs)]
          properties['description'] = node.description
          properties['sla'] = node.sla
          properties['sla_enabled'] = node.sla_enabled
        elif node.node_type == 'email':
          properties['to'] = node.to
          properties['cc'] = node.cc
          properties['subject'] = node.subject
          properties['body'] = node.body
          properties['description'] = node.description
          properties['sla'] = node.sla
          properties['sla_enabled'] = node.sla_enabled
        elif node.node_type == 'streaming':
          properties['mapper'] = node.mapper
          properties['reducer'] = node.reducer
          properties['files'] = [{'value': f} for f in json.loads(node.files)]
          properties['archives'] = json.loads(node.archives)
          properties['job_properties'] = json.loads(node.job_properties)
          properties['prepares'] = json.loads(node.prepares)
          properties['job_xml'] = node.job_xml
          properties['description'] = node.description
          properties['sla'] = node.sla
          properties['sla_enabled'] = node.sla_enabled
        elif node.node_type == 'distcp':
          properties['distcp_parameters'] = json.loads(node.params)
          properties['java_opts'] = node.job_xml
          properties['job_properties'] = json.loads(node.job_properties)
          properties['prepares'] = json.loads(node.prepares)
          properties['description'] = node.description
          properties['sla'] = node.sla
          properties['sla_enabled'] = node.sla_enabled

        wf_nodes.append({
          "id": uuids[node.id],
          "name": '%s-%s' % (node.node_type.split('-')[0], uuids[node.id][:4]),
          "type": "%s-widget" % node.node_type,
          "properties": properties,
          "children": [{('to' if link.name in ('ok', 'start') else link.name): uuids[link.child.get_full_node().id]} for link in node.get_children_links()]
        })
      else:
        _dig_nodes(node)

  _dig_nodes(old_nodes)

  data['workflow']['nodes'] = wf_nodes

  return Workflow(data=json.dumps(data))
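
# Illustrative usage sketch, assuming `old_wf` is a pre-Hue-3.8 Workflow model
# instance from the old editor:
#
#   new_wf = import_workflow_from_hue_3_7(old_wf)
#   data = new_wf.get_data()  # converted layout rows and node properties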


class Coordinator(Job):
  XML_FILE_NAME = 'coordinator.xml'
  PROPERTY_APP_PATH = 'oozie.coord.application.path'
  HUE_ID = 'hue-id-c'

  def __init__(self, data=None, json_data=None, document=None):
    self.document = document

    if document is not None:
      self._data = json.loads(document.data)
    elif json_data is not None:
      self._data = json.loads(json_data)
    elif data is not None:
      self._data = data
    else:
      self._data = {
        'id': None,
        'uuid': None,
        'name': 'My Coordinator',
        'variables': [],  # Aka workflow parameters
        'properties': {
          'description': '',
          'deployment_dir': '',
          'schema_version': 'uri:oozie:coordinator:0.2',
          'frequency_number': 1,
          'frequency_unit': 'days',
          'cron_frequency': '0 0 * * *',
          'cron_advanced': False,
          'timezone': '',
          'start': '${start_date}',
          'end': '${end_date}',
          'workflow': None,
          'timeout': None,
          'concurrency': None,
          'execution': None,
          'throttle': None,
          'job_xml': '',
          'credentials': [],
          'parameters': [
            {'name': 'oozie.use.system.libpath', 'value': True},
            {'name': 'start_date', 'value': datetime.today().strftime('%Y-%m-%dT%H:%M')},
            {'name': 'end_date', 'value': (datetime.today() + timedelta(days=7)).strftime('%Y-%m-%dT%H:%M')}
          ],
          'sla': WorkflowConfiguration.SLA_DEFAULT
        }
      }

  @property
  def id(self):
    return self.document.id

  @property
  def uuid(self):
    return self.document.uuid

  def get_data_for_json(self):
    _data = self.data.copy()

    start_date = filter(lambda a: a['name'] == 'start_date', self._data['properties']['parameters'])
    if start_date and type(start_date[0]['value']) == datetime:
      start_date[0]['value'] = start_date[0]['value'].strftime('%Y-%m-%dT%H:%M:%S')

    end_date = filter(lambda a: a['name'] == 'end_date', self._data['properties']['parameters'])
    if end_date and type(end_date[0]['value']) == datetime:
      end_date[0]['value'] = end_date[0]['value'].strftime('%Y-%m-%dT%H:%M:%S')

    return _data

  def to_json(self):
    return json.dumps(self.get_data_for_json())

  def to_json_for_html(self):
    return json.dumps(self.get_data_for_json(), cls=JSONEncoderForHTML)

  @property
  def data(self):
    if type(self._data['properties']['start']) != datetime and '$' not in self._data['properties']['start']:
      self._data['properties']['start'] = parse(self._data['properties']['start'])

    if type(self._data['properties']['end']) != datetime and '$' not in self._data['properties']['end']:
      self._data['properties']['end'] = parse(self._data['properties']['end'])

    if self.document is not None:
      self._data['id'] = self.document.id

    return self._data

  @property
  def name(self):
    return self.data['name']

  def set_workspace(self, user):
    self.data['properties']['deployment_dir'] = Job.get_workspace(user)

  @property
  def deployment_dir(self):
    return self.data['properties']['deployment_dir']

  def find_parameters(self):
    params = set()

    for param in find_dollar_braced_variables(self.name):
      params.add(param)

    for param in find_json_parameters([self.data['properties']]):
      params.add(param)

    for param in find_json_parameters(self.data['variables']):
      if param not in ('MINUTE', 'HOUR', 'DAY', 'MONTH', 'YEAR') and not param.startswith('coord:'):
        params.add(param)

    if self.sla_enabled:
      for param in find_json_parameters(self.sla):
        params.add(param)

    # Get missed params from workflow
    for prop in self.workflow.find_parameters():
      if prop not in params:
        params.add(prop)

    # Remove the ones filled up by coordinator
    removable_names = [ds['workflow_variable'] for ds in self.data['variables']]

    return dict([(param, '') for param in list(params) if param not in removable_names])

  @property
  def sla_enabled(self):
    return self.data['properties']['sla'][0].get('value')

  @property
  def sla(self):
    return self.data['properties']['sla']

  @property
  def parameters(self):
    return self.data['properties']['parameters']

  @property
  def datasets(self):
    return self.inputDatasets + self.outputDatasets

  @property
  def inputDatasets(self):
    return [Dataset(dataset, self) for dataset in self.data['variables'] if dataset['dataset_type'] == 'input_path']

  @property
  def outputDatasets(self):
    return [Dataset(dataset, self) for dataset in self.data['variables'] if dataset['dataset_type'] == 'output_path']

  @property
  def start_server_tz(self):
    return self.data['properties']['start']

  @property
  def end_server_tz(self):
    return self.data['properties']['end']

  @property
  def frequency(self):
    return '${coord:%(unit)s(%(number)d)}' % {'unit': self.data['properties']['frequency_unit'], 'number': self.data['properties']['frequency_number']}

  @property
  def cron_frequency(self):
    data_dict = self.data['properties']

    if 'cron_frequency' in data_dict:
      return data_dict['cron_frequency']
    else:
      # Backward compatibility
      freq = '0 0 * * *'
      if data_dict['frequency_number'] == 1:
        if data_dict['frequency_unit'] == 'minutes':
          freq = '* * * * *'
        elif data_dict['frequency_unit'] == 'hours':
          freq = '0 * * * *'
        elif data_dict['frequency_unit'] == 'days':
          freq = '0 0 * * *'
        elif data_dict['frequency_unit'] == 'months':
          freq = '0 0 0 * *'
      return {'frequency': freq, 'isAdvancedCron': False}

  def to_xml(self, mapping=None):
    if mapping is None:
      mapping = {}
    tmpl = "editor2/gen/coordinator.xml.mako"
    return re.sub(re.compile('\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {'coord': self, 'mapping': mapping})).encode('utf-8', 'xmlcharrefreplace')

  def clear_workflow_params(self):
    # Repopulated in the config properties
    self.data['variables'] = [dataset for dataset in self.data['variables'] if dataset['dataset_type'] != 'parameter']

  @property
  def properties(self):
    props = [{'name': dataset['workflow_variable'], 'value': dataset['dataset_variable']} for dataset in self.data['variables'] if dataset['dataset_type'] == 'parameter']
    props += self.data['properties']['parameters']
    return props

  @property
  def workflow(self):
    if self.document is None:
      raise PopupException(_('Cannot return workflow since document attribute is None.'))

    wf_doc = Document2.objects.get_by_uuid(user=self.document.owner, uuid=self.data['properties']['workflow'])
    return Workflow(document=wf_doc)

  def get_absolute_url(self):
    return reverse('oozie:edit_coordinator') + '?coordinator=%s' % self.id

  @classmethod
  def get_application_path_key(cls):
    return 'oozie.coord.application.path'
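
# Illustrative sketch: frequency renders the Oozie EL expression from the stored
# unit/number pair, while cron_frequency falls back to mapping legacy pairs onto
# a cron string. With the defaults above (frequency_unit='days', frequency_number=1):
#
#   coord = Coordinator()
#   coord.frequency  # '${coord:days(1)}'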


class Dataset():

  def __init__(self, data, coordinator):
    self._data = data
    self.coordinator = coordinator

  @property
  def data(self):
    self._data['name'] = self._data['workflow_variable']
    return self._data

  @property
  def frequency(self):
    if self.data['same_frequency']:
      if self.coordinator.cron_frequency == '* * * * *':
        frequency_unit = 'minutes'
      elif self.coordinator.cron_frequency == '0 * * * *':
        frequency_unit = 'hours'
      elif self.coordinator.cron_frequency == '0 0 * * *':
        frequency_unit = 'days'
      elif self.coordinator.cron_frequency == '0 0 0 * *':
        frequency_unit = 'months'
      else:
        raise PopupException(_('The frequency of the workflow parameter "%s" cannot be guessed from the frequency of the coordinator.'
                               ' It needs to be specified manually.') % self.data['name'])
      frequency_number = 1
    else:
      frequency_unit = self.data['frequency_unit']
      frequency_number = self.data['frequency_number']

    return '${coord:%(unit)s(%(number)s)}' % {'unit': frequency_unit, 'number': frequency_number}

  @property
  def start_server_tz(self):
    if self.data['same_start']:
      return self.coordinator.start_server_tz
    else:
      return convert_to_server_timezone(self.data['start'], self.data['timezone'])

  @property
  def timezone(self):
    if self.data['same_timezone']:
      return self.coordinator.data['properties']['timezone']
    else:
      return self.data['timezone']

  @property
  def start_instance(self):
    if not self.is_advanced_start_instance:
      return int(self.data['advanced_start_instance'])
    else:
      return 0

  @property
  def is_advanced_start_instance(self):
    return not self.is_int(self.data['advanced_start_instance'])

  def is_int(self, text):
    try:
      int(text)
      return True
    except ValueError:
      return False

  @property
  def end_instance(self):
    if not self.is_advanced_end_instance:
      return int(self.data['advanced_end_instance'])
    else:
      return 0

  @property
  def is_advanced_end_instance(self):
    return not self.is_int(self.data['advanced_end_instance'])
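
# Illustrative sketch: instance offsets accept either a plain integer or an
# advanced EL snippet; is_int() decides which path applies. With a hypothetical
# (abridged) dataset dict and coordinator `coord`:
#
#   ds = Dataset({'advanced_start_instance': '-1', 'workflow_variable': 'ds1'}, coord)
#   ds.is_advanced_start_instance  # False: '-1' parses as an int
#   ds.start_instance              # -1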


class Bundle(Job):
  XML_FILE_NAME = 'bundle.xml'
  PROPERTY_APP_PATH = 'oozie.bundle.application.path'
  HUE_ID = 'hue-id-b'

  def __init__(self, data=None, json_data=None, document=None):
    self.document = document

    if document is not None:
      self._data = json.loads(document.data)
    elif json_data is not None:
      self._data = json.loads(json_data)
    elif data is not None:
      self._data = data
    else:
      self._data = {
        'id': None,
        'uuid': None,
        'name': 'My Bundle',
        'coordinators': [],
        'properties': {
          'description': '',
          'deployment_dir': '',
          'schema_version': 'uri:oozie:bundle:0.2',
          'kickoff': datetime.today(),
          'parameters': [{'name': 'oozie.use.system.libpath', 'value': 'true'}]
        }
      }

  @property
  def id(self):
    return self.document.id

  @property
  def uuid(self):
    return self.document.uuid

  def get_data_for_json(self):
    _data = self.data.copy()
    _data['properties']['kickoff'] = _data['properties']['kickoff'].strftime('%Y-%m-%dT%H:%M:%S')
    return _data

  def to_json(self):
    return json.dumps(self.get_data_for_json())

  def to_json_for_html(self):
    return json.dumps(self.get_data_for_json(), cls=JSONEncoderForHTML)

  @property
  def data(self):
    if type(self._data['properties']['kickoff']) == unicode:
      self._data['properties']['kickoff'] = parse(self._data['properties']['kickoff'])

    if self.document is not None:
      self._data['id'] = self.document.id

    return self._data

  def to_xml(self, mapping=None):
    if mapping is None:
      mapping = {}

    mapping.update(dict(list(self.get_coordinator_docs().values_list('uuid', 'name'))))
    tmpl = "editor2/gen/bundle.xml.mako"
    return force_unicode(
      re.sub(re.compile('\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {
        'bundle': self,
        'mapping': mapping
      })))

  def get_coordinator_docs(self):
    coordinator_ids = [coordinator['coordinator'] for coordinator in self.data['coordinators']]
    return Document2.objects.filter(type='oozie-coordinator2', uuid__in=coordinator_ids)

  def get_coordinator_objects(self):
    return [Coordinator(document=doc) for doc in self.get_coordinator_docs()]

  @property
  def name(self):
    return self.data['name']

  @property
  def parameters(self):
    return self.data['properties']['parameters']

  @property
  def kick_off_time_utc(self):
    return utc_datetime_format(self.data['properties']['kickoff'])

  def set_workspace(self, user):
    self.data['properties']['deployment_dir'] = Job.get_workspace(user)

  @property
  def deployment_dir(self):
    return self.data['properties']['deployment_dir']

  def find_parameters(self):
    params = set()

    for param in find_dollar_braced_variables(self.name):
      params.add(param)

    for coord in self.get_coordinator_objects():
      params.update(coord.find_parameters())

    for param in find_json_parameters([self.data['properties']]):
      params.add(param)

    # Remove the ones filled up by bundle
    removable_names = [p['name'] for coord in self.data['coordinators'] for p in coord['properties']]

    return dict([(param, '') for param in list(params) if param not in removable_names])

  def get_absolute_url(self):
    return reverse('oozie:edit_bundle') + '?bundle=%s' % self.id

  @classmethod
  def get_application_path_key(cls):
    return 'oozie.bundle.application.path'
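
# Illustrative sketch: find_parameters() aggregates ${...} names from the bundle
# name, its coordinators and its own properties, then drops names a bundled
# coordinator already fills in. With a hypothetical saved bundle document `doc`:
#
#   bundle = Bundle(document=doc)
#   bundle.find_parameters()  # e.g. {'start_date': '', 'end_date': ''}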


class History(object):

  @classmethod
  def get_workflow_from_config(cls, conf_dict):
    try:
      doc = Document2.objects.get(type='oozie-workflow2', id=conf_dict.get(Workflow.HUE_ID))
      return Workflow(document=doc)
    except Document2.DoesNotExist:
      pass

  @classmethod
  def get_coordinator_from_config(cls, conf_dict):
    try:
      doc = Document2.objects.get(type='oozie-coordinator2', id=conf_dict.get(Coordinator.HUE_ID))
      return Coordinator(document=doc)
    except Document2.DoesNotExist:
      pass

  @classmethod
  def get_bundle_from_config(cls, conf_dict):
    try:
      doc = Document2.objects.get(type='oozie-bundle2', id=conf_dict.get(Bundle.HUE_ID))
      return Bundle(document=doc)
    except Document2.DoesNotExist:
      pass
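
# Illustrative sketch: History maps a submitted job's configuration back to its
# editor document through the hue-id-* property injected at submission time:
#
#   conf = {Workflow.HUE_ID: 42}            # hypothetical Oozie conf dict
#   History.get_workflow_from_config(conf)  # Workflow, or None if it was deleted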


def _import_workspace(fs, user, job):
  source_workspace_dir = job.deployment_dir

  job.set_workspace(user)
  job.check_workspace(fs, user)
  job.import_workspace(fs, source_workspace_dir, user)


def _save_workflow(workflow, layout, user, fs=None):
  if workflow.get('id'):
    workflow_doc = Document2.objects.get(id=workflow['id'])
  else:
    workflow_doc = Document2.objects.create(name=workflow['name'], uuid=workflow['uuid'], type='oozie-workflow2', owner=user, description=workflow['properties']['description'])
    Document.objects.link(workflow_doc, owner=workflow_doc.owner, name=workflow_doc.name, description=workflow_doc.description, extra='workflow2')

  # Excludes all the sub-workflow and Hive dependencies. Contains list of history and coordinator dependencies.
  workflow_doc.dependencies = workflow_doc.dependencies.exclude(Q(is_history=False) & Q(type__in=['oozie-workflow2', 'query-hive', 'query-java']))

  dependencies = (
    [node['properties']['workflow'] for node in workflow['nodes'] if node['type'] == 'subworkflow-widget'] +
    [node['properties']['uuid'] for node in workflow['nodes'] if 'document-widget' in node['type']]
  )
  if dependencies:
    dependency_docs = Document2.objects.filter(uuid__in=dependencies)
    workflow_doc.dependencies.add(*dependency_docs)

  if workflow['properties'].get('imported'):  # We convert from an old workflow format (< 3.8) to the latest
    workflow['properties']['imported'] = False
    workflow_instance = Workflow(workflow=workflow, user=user)
    _import_workspace(fs, user, workflow_instance)
    workflow['properties']['deployment_dir'] = workflow_instance.deployment_dir

  workflow_doc.update_data({'workflow': workflow})
  workflow_doc.update_data({'layout': layout})
  workflow_doc1 = workflow_doc.doc.get()
  workflow_doc.name = workflow_doc1.name = workflow['name']
  workflow_doc.description = workflow_doc1.description = workflow['properties']['description']
  workflow_doc.save()
  workflow_doc1.save()

  return workflow_doc
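
# Illustrative note: _save_workflow() recomputes the document's dependency links
# on every save, so sub-workflow and saved-query references stay current. A
# minimal call, assuming editor-style `workflow` and `layout` dicts:
#
#   doc = _save_workflow(data['workflow'], data['layout'], request.user)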


class WorkflowBuilder():
  """
  Building a workflow that has saved Documents for nodes (e.g. saved Hive query, saved Pig script...).
  """

  def create_workflow(self, user, document=None, documents=None, name=None, managed=False):
    nodes = []

    if documents is None:
      documents = [document]

    if name is None:
      name = _('Schedule of ') + ','.join([document.name or document.type for document in documents])

    for document in documents:
      if document.type == 'query-java':
        node = self.get_java_document_node(document, name)
      else:
        node = self.get_hive_document_node(document, name, user)
      nodes.append(node)

    workflow_doc = self.get_workflow(nodes, name, document.uuid, user, managed=managed)
    workflow_doc.dependencies.add(*documents)

    return workflow_doc

  def get_hive_document_node(self, document, name, user):
    api = get_oozie(user)

    credentials = [HiveDocumentAction.DEFAULT_CREDENTIALS] if api.security_enabled else []

    notebook = Notebook(document=document)
    parameters = find_dollar_braced_variables(notebook.get_str())
    parameters = [{u'value': u'%s=${%s}' % (p, p)} for p in parameters]

    return {
      u'name': u'doc-hive-%s' % document.uuid[:4],
      u'id': str(uuid.uuid4()),
      u'type': u'hive-document-widget',
      u'properties': {
        u'files': [],
        u'job_xml': u'',
        u'uuid': document.uuid,
        u'parameters': parameters,
        u'retry_interval': [],
        u'retry_max': [],
        u'job_properties': [],
        u'sla': [
          {u'key': u'enabled', u'value': False},
          {u'key': u'nominal-time', u'value': u'${nominal_time}'},
          {u'key': u'should-start', u'value': u''},
          {u'key': u'should-end', u'value': u'${30 * MINUTES}'},
          {u'key': u'max-duration', u'value': u''},
          {u'key': u'alert-events', u'value': u''},
          {u'key': u'alert-contact', u'value': u''},
          {u'key': u'notification-msg', u'value': u''},
          {u'key': u'upstream-apps', u'value': u''},
        ],
        u'archives': [],
        u'prepares': [],
        u'credentials': credentials,
        u'password': u'',
        u'jdbc_url': u'',
      },
      u'children': [
        {u'to': u'33430f0f-ebfa-c3ec-f237-3e77efa03d0a'},
        {u'error': u'17c9c895-5a16-7443-bb81-f34b30b21548'}
      ],
      u'actionParameters': [],
    }

  def get_java_document_node(self, document, name):
    credentials = []

    return {
      u'id': str(uuid.uuid4()),
      u'name': u'doc-java-%s' % document.uuid[:4],
      u'type': u'java-document-widget',
      u'properties': {
        u'uuid': document.uuid,  # Files, main_class and arguments come from there
        u'job_xml': [],
        u'jar_path': u'',
        u'java_opts': [],
        u'retry_max': [],
        u'retry_interval': [],
        u'job_properties': [],
        u'capture_output': False,
        u'prepares': [],
        u'credentials': credentials,
        u'sla': [
          {u'key': u'enabled', u'value': False},
          {u'key': u'nominal-time', u'value': u'${nominal_time}'},
          {u'key': u'should-start', u'value': u''},
          {u'key': u'should-end', u'value': u'${30 * MINUTES}'},
          {u'key': u'max-duration', u'value': u''},
          {u'key': u'alert-events', u'value': u''},
          {u'key': u'alert-contact', u'value': u''},
          {u'key': u'notification-msg', u'value': u''},
          {u'key': u'upstream-apps', u'value': u''},
        ],
        u'archives': []
      },
      u'children': [
        {u'to': u'33430f0f-ebfa-c3ec-f237-3e77efa03d0a'},
        {u'error': u'17c9c895-5a16-7443-bb81-f34b30b21548'}
      ],
      u'actionParameters': [],
      u'actionParametersFetched': False
    }

  def get_workflow(self, nodes, name, doc_uuid, user, managed=False):
    parameters = []

    data = {
      'workflow': {
        u'name': name,
        u'nodes': [{
            u'name': u'Start',
            u'properties': {},
            u'actionParametersFetched': False,
            u'id': u'3f107997-04cc-8733-60a9-a4bb62cebffc',
            u'type': u'start-widget',
            u'children': [{u'to': u'33430f0f-ebfa-c3ec-f237-3e77efa03d0a'}],
            u'actionParameters': [],
          }, {
            u'name': u'End',
            u'properties': {},
            u'actionParametersFetched': False,
            u'id': u'33430f0f-ebfa-c3ec-f237-3e77efa03d0a',
            u'type': u'end-widget',
            u'children': [],
            u'actionParameters': [],
          }, {
            u'name': u'Kill',
            u'properties': {
              u'body': u'',
              u'cc': u'',
              u'to': u'',
              u'enableMail': False,
              u'message': u'Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]',
              u'subject': u'',
            },
            u'actionParametersFetched': False,
            u'id': u'17c9c895-5a16-7443-bb81-f34b30b21548',
            u'type': u'kill-widget',
            u'children': [],
            u'actionParameters': [],
          }
        ],
        u'properties': {
          u'job_xml': u'',
          u'description': u'',
          u'wf1_id': None,
          u'sla_enabled': False,
          u'deployment_dir': Job.get_workspace(user),
          u'schema_version': u'uri:oozie:workflow:0.5',
          u'sla': [
            {u'key': u'enabled', u'value': False},
            {u'key': u'nominal-time', u'value': u'${nominal_time}'},
            {u'key': u'should-start', u'value': u''},
            {u'key': u'should-end', u'value': u'${30 * MINUTES}'},
            {u'key': u'max-duration', u'value': u''},
            {u'key': u'alert-events', u'value': u''},
            {u'key': u'alert-contact', u'value': u''},
            {u'key': u'notification-msg', u'value': u''},
            {u'key': u'upstream-apps', u'value': u''},
          ],
          u'show_arrows': True,
          u'parameters': parameters,
          u'properties': [],
        },
        u'uuid': str(uuid.uuid4()),
      }
    }

    _prev_node = data['workflow']['nodes'][0]

    for node in nodes:
      data['workflow']['nodes'].append(node)
      _prev_node['children'][0]['to'] = node['id']  # We link the nodes
      _prev_node = node

    workflow_doc = _save_workflow(data['workflow'], {}, user)
    workflow_doc.is_managed = managed
    workflow_doc.save()

    return workflow_doc
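
# Illustrative usage sketch, assuming `doc` is a saved Hive query (Document2)
# and `user` a Django user: the builder wraps the document in a one-action
# workflow ready to be scheduled by a coordinator.
#
#   workflow_doc = WorkflowBuilder().create_workflow(user=user, document=doc, managed=True)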