- #!/usr/bin/env python
- # Licensed to Cloudera, Inc. under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. Cloudera, Inc. licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- import json
- import logging
- import re
- import time
- import uuid
- from datetime import datetime, timedelta
- from dateutil.parser import parse
- from string import Template
- from django.core.urlresolvers import reverse
- from django.db.models import Q
- from django.utils.encoding import force_unicode
- from django.utils.translation import ugettext as _
- from django.contrib.auth.models import User
- from desktop.conf import USE_DEFAULT_CONFIGURATION
- from desktop.lib import django_mako
- from desktop.lib.exceptions_renderable import PopupException
- from desktop.lib.i18n import smart_str
- from desktop.lib.json_utils import JSONEncoderForHTML
- from desktop.models import DefaultConfiguration, Document2, Document
- from hadoop.fs.hadoopfs import Hdfs
- from hadoop.fs.exceptions import WebHdfsException
- from liboozie.oozie_api import get_oozie
- from liboozie.submission2 import Submission
- from liboozie.submission2 import create_directories
- from notebook.models import Notebook
- from oozie.conf import REMOTE_SAMPLE_DIR
- from oozie.utils import utc_datetime_format, UTC_TIME_FORMAT, convert_to_server_timezone
- from oozie.importlib.workflows import generate_v2_graph_nodes, MalformedWfDefException, InvalidTagWithNamespaceException
- LOG = logging.getLogger(__name__)
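- # Base class for the editor job types (e.g. Workflow below): shared parameter
- # discovery, HDFS workspace handling and Oozie-safe name validation.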
- class Job(object):
- def find_all_parameters(self, with_lib_path=True):
- params = self.find_parameters()
- for param in self.parameters:
- params[param['name'].strip()] = param['value']
- if params.get('nominal_time') == '':
- params['nominal_time'] = datetime.today().strftime(UTC_TIME_FORMAT)
- return [{'name': name, 'value': value} for name, value in params.iteritems() if with_lib_path or name != 'oozie.use.system.libpath']
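- # Builds a per-submission HDFS workspace path: '$USER' (if present in
- # REMOTE_SAMPLE_DIR) is replaced with the username and '$TIME' with the
- # current epoch time, so each call yields a fresh directory.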
- @classmethod
- def get_workspace(cls, user):
- if not isinstance(user, basestring):
- user = user.username
- return (REMOTE_SAMPLE_DIR.get() + '/hue-oozie-$TIME').replace('$USER', user).replace('$TIME', str(time.time()))
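- # Truncates the name to 40 characters and maps disallowed characters to '_':
- # the first character must match [a-zA-Z_{$}], subsequent ones [-_a-zA-Z0-9{$}].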
- @property
- def validated_name(self):
- good_name = []
- for c in self.name[:40]:
- if not good_name:
- if not re.match('[a-zA-Z_\{\$\}]', c):
- c = '_'
- else:
- if not re.match('[\-_a-zA-Z0-9\{\$\}]', c):
- c = '_'
- good_name.append(c)
- return ''.join(good_name)
- def __str__(self):
- return '%s' % force_unicode(self.name)
- def deployment_dir(self):
- return None
- def check_workspace(self, fs, user):
- # Create optional default root workspace for the first submission
- if REMOTE_SAMPLE_DIR.get() == REMOTE_SAMPLE_DIR.config.default_value:
- create_directories(fs, [REMOTE_SAMPLE_DIR.get()])
- Submission(user, self, fs, None, {})._create_dir(self.deployment_dir)
- Submission(user, self, fs, None, {})._create_dir(Hdfs.join(self.deployment_dir, 'lib'))
- def import_workspace(self, fs, source_deployment_dir, owner):
- try:
- fs.copy_remote_dir(source_deployment_dir, self.deployment_dir, owner=owner)
- except WebHdfsException as e:
- msg = _('The copy of the deployment directory failed: %s.') % e
- LOG.error(msg)
- raise PopupException(msg)
- class WorkflowConfiguration(object):
- APP_NAME = 'oozie-workflow'
- SLA_DEFAULT = [
- {'key': 'enabled', 'value': False}, # Always first element
- {'key': 'nominal-time', 'value': '${nominal_time}'},
- {'key': 'should-start', 'value': ''},
- {'key': 'should-end', 'value': '${30 * MINUTES}'},
- {'key': 'max-duration', 'value': ''},
- {'key': 'alert-events', 'value': ''},
- {'key': 'alert-contact', 'value': ''},
- {'key': 'notification-msg', 'value': ''},
- {'key': 'upstream-apps', 'value': ''},
- ]
- PROPERTIES = [
- {
- "multiple": True,
- "defaultValue": [
- {
- 'name': 'oozie.use.system.libpath',
- 'value': True
- }
- ],
- "value": [
- {
- 'name': 'oozie.use.system.libpath',
- 'value': True
- }
- ],
- "nice_name": _("Variables"),
- "key": "parameters",
- "help_text": _("Add one or more Oozie workflow job parameters."),
- "type": "parameters"
- }, {
- "multiple": False,
- "defaultValue": '',
- "value": '',
- "nice_name": _("Workspace"),
- "key": "deployment_dir",
- "help_text": _("Specify the deployment directory."),
- "type": "hdfs-file"
- }, {
- "multiple": True,
- "defaultValue": [],
- "value": [],
- "nice_name": _("Hadoop Properties"),
- "key": "properties",
- "help_text": _("Hadoop configuration properties."),
- "type": "settings"
- }, {
- "multiple": False,
- "defaultValue": True,
- "value": True,
- "nice_name": _("Show graph arrows"),
- "key": "show_arrows",
- "help_text": _("Toggles display of graph arrows."),
- "type": "boolean"
- }, {
- "multiple": False,
- "defaultValue": "uri:oozie:workflow:0.5",
- "value": "uri:oozie:workflow:0.5",
- "nice_name": _("Version"),
- "key": "schema_version",
- "help_text": _("Oozie XML Schema Version"),
- "type": "string",
- "options": [
- "uri:oozie:workflow:0.5",
- "uri:oozie:workflow:0.4.5",
- "uri:oozie:workflow:0.4",
- ]
- }, {
- "multiple": False,
- "defaultValue": '',
- "value": '',
- "nice_name": _("Job XML"),
- "key": "job_xml",
- "help_text": _("Oozie Job XML file"),
- "type": "hdfs-file"
- }, {
- "multiple": False,
- "defaultValue": False,
- "value": False,
- "nice_name": _("SLA Enabled"),
- "key": "sla_enabled",
- "help_text": _("SLA Enabled"),
- "type": "boolean"
- }, {
- "multiple": False,
- "defaultValue": SLA_DEFAULT,
- "value": SLA_DEFAULT,
- "nice_name": _("SLA Configuration"),
- "key": "sla",
- "help_text": _("Oozie SLA properties"),
- "type": "settings",
- "options": [prop['key'] for prop in SLA_DEFAULT]
- }
- ]
- class Workflow(Job):
- XML_FILE_NAME = 'workflow.xml'
- PROPERTY_APP_PATH = 'oozie.wf.application.path'
- HUE_ID = 'hue-id-w'
- def __init__(self, data=None, document=None, workflow=None, user=None):
- self.document = document
- self.user = user
- if document is not None:
- self.data = document.data
- elif data is not None:
- self.data = data
- else:
- if not workflow:
- workflow = self.get_default_workflow()
- workflow['properties'] = self.get_workflow_properties_for_user(user, workflow)
- self.data = json.dumps({
- 'layout': [{
- "size":12, "rows":[
- {"widgets":[{"size":12, "name":"Start", "id":"3f107997-04cc-8733-60a9-a4bb62cebffc", "widgetType":"start-widget", "properties":{}, "offset":0, "isLoading":False, "klass":"card card-widget span12"}]},
- {"widgets":[{"size":12, "name":"End", "id":"33430f0f-ebfa-c3ec-f237-3e77efa03d0a", "widgetType":"end-widget", "properties":{}, "offset":0, "isLoading":False, "klass":"card card-widget span12"}]},
- {"widgets":[{"size":12, "name":"Kill", "id":"17c9c895-5a16-7443-bb81-f34b30b21548", "widgetType":"kill-widget", "properties":{}, "offset":0, "isLoading":False, "klass":"card card-widget span12"}]}
- ],
- "drops":[ "temp"],
- "klass":"card card-home card-column span12"
- }],
- 'workflow': workflow
- })
- @classmethod
- def get_application_path_key(cls):
- return 'oozie.wf.application.path'
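- # Rebuilds the editor's data model from an Oozie workflow XML definition:
- # parse the XML into nodes, build an adjacency list, derive the node
- # hierarchy starting from 'start', then generate the layout rows and the
- # widget node list consumed by the JS editor.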
- @classmethod
- def gen_workflow_data_from_xml(cls, user, oozie_workflow):
- node_list = []
- try:
- node_list = generate_v2_graph_nodes(oozie_workflow.definition)
- except MalformedWfDefException as e:
- LOG.exception("Could not find any nodes in Workflow definition. Maybe it's malformed?")
- except InvalidTagWithNamespaceException as e:
- LOG.exception(
- "Tag with namespace %(namespace)s is not valid. Please use one of the following namespaces: %(namespaces)s" % {
- 'namespace': e.namespace,
- 'namespaces': e.namespaces
- })
- _to_lowercase(node_list)
- adj_list = _create_graph_adjacency_list(node_list)
- node_hierarchy = ['start']
- _get_hierarchy_from_adj_list(adj_list, adj_list['start']['ok_to'], node_hierarchy)
- _update_adj_list(adj_list)
- wf_rows = _create_workflow_layout(node_hierarchy, adj_list)
- data = {'layout': [{}], 'workflow': {}}
- if wf_rows:
- data['layout'][0]['rows'] = wf_rows
- wf_nodes = []
- _dig_nodes(node_hierarchy, adj_list, user, wf_nodes)
- data['workflow']['nodes'] = wf_nodes
- data['workflow']['id'] = '123'
- data['workflow']['properties'] = cls.get_workflow_properties_for_user(user, workflow=None)
- data['workflow']['properties'].update({
- 'deployment_dir': '/user/hue/oozie/workspaces/hue-oozie-1452553957.19'
- })
- return data
- @classmethod
- def get_default_workflow(cls):
- return {
- "id": None,
- "uuid": None,
- "name": "My Workflow",
- "nodes": [
- {"id": "3f107997-04cc-8733-60a9-a4bb62cebffc", "name": "Start", "type": "start-widget", "properties": {},
- "children": [{'to': '33430f0f-ebfa-c3ec-f237-3e77efa03d0a'}]},
- {"id": "33430f0f-ebfa-c3ec-f237-3e77efa03d0a", "name": "End", "type": "end-widget", "properties": {},
- "children": []},
- {"id": "17c9c895-5a16-7443-bb81-f34b30b21548", "name": "Kill", "type": "kill-widget",
- "properties": {'message': _('Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]')},
- "children": []}
- ]
- }
- @classmethod
- def get_workflow_properties_for_user(cls, user, workflow=None):
- workflow = workflow if workflow is not None else {}
- properties = workflow.get('properties', None)
- if not properties:
- config = None
- properties = cls.get_properties()
- if user is not None:
- if USE_DEFAULT_CONFIGURATION.get():
- config = DefaultConfiguration.objects.get_configuration_for_user(app=WorkflowConfiguration.APP_NAME, user=user)
- if config is not None:
- properties.update(config.properties_dict)
- properties.update({
- 'wf1_id': None,
- 'description': ''
- })
- return properties
- @staticmethod
- def get_properties():
- return dict((prop['key'], prop['value']) for prop in WorkflowConfiguration.PROPERTIES)
- @property
- def id(self):
- return self.document.id
- @property
- def uuid(self):
- return self.document.uuid
- @property
- def name(self):
- _data = self.get_data()
- return _data['workflow']['name']
- @property
- def deployment_dir(self):
- _data = self.get_data()
- return _data['workflow']['properties']['deployment_dir']
- @property
- def parameters(self):
- _data = self.get_data()
- return _data['workflow']['properties']['parameters']
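- # 'enabled' is always the first element of the SLA settings list
- # (see WorkflowConfiguration.SLA_DEFAULT), hence the [0] lookup below.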
- @property
- def sla_enabled(self):
- _data = self.get_data()
- return _data['workflow']['properties']['sla'][0].get('value')
- @property
- def has_some_slas(self):
- return self.sla_enabled or any([node.sla_enabled for node in self.nodes])
- @property
- def credentials(self):
- return list(set([cred for node in self.nodes for cred in node.data['properties']['credentials']]))
- @property
- def sla(self):
- _data = self.get_data()
- return _data['workflow']['properties']['sla']
- @property
- def nodes(self):
- _data = self.get_data()
- return [Node(node, self.user) for node in _data['workflow']['nodes']]
- def find_parameters(self):
- params = set()
- for param in find_dollar_braced_variables(self.name):
- params.add(param)
- if self.sla_enabled:
- for param in find_json_parameters(self.sla):
- params.add(param)
- for node in self.nodes:
- params.update(node.find_parameters())
- return dict([(param, '') for param in list(params)])
- def get_json(self):
- _data = self.get_data()
- return json.dumps(_data)
- def get_data(self):
- _data = json.loads(self.data)
- if self.document is not None:
- _data['workflow']['id'] = self.document.id
- _data['workflow']['dependencies'] = list(self.document.dependencies.values('uuid', ))
- else:
- _data['workflow']['dependencies'] = []
- if 'parameters' not in _data['workflow']['properties']:
- _data['workflow']['properties']['parameters'] = [
- {'name': 'oozie.use.system.libpath', 'value': True},
- ]
- if 'show_arrows' not in _data['workflow']['properties']:
- _data['workflow']['properties']['show_arrows'] = True
- for node in _data['workflow']['nodes']:
- if 'credentials' in node['properties']: # If node is an Action
- if 'retry_max' not in node['properties']: # When displaying a workflow
- node['properties']['retry_max'] = []
- if 'retry_interval' not in node['properties']:
- node['properties']['retry_interval'] = []
- # Backward compatibility
- _upgrade_older_node(node)
- return _data
- def to_xml(self, mapping=None):
- if mapping is None:
- mapping = {}
- tmpl = 'editor2/gen/workflow.xml.mako'
- data = self.get_data()
- nodes = [node for node in self.nodes if node.name != 'End'] + [node for node in self.nodes if node.name == 'End']  # End at the end
- node_mapping = dict([(node.id, node) for node in nodes])
- sub_wfs_ids = [node.data['properties']['workflow'] for node in nodes if node.data['type'] == 'subworkflow']
- workflow_mapping = dict(
- [(workflow.uuid, Workflow(document=workflow, user=self.user)) for workflow in Document2.objects.filter(uuid__in=sub_wfs_ids)])
- xml = re.sub(re.compile('>\s*\n+', re.MULTILINE), '>\n', django_mako.render_to_string(tmpl, {
- 'wf': self,
- 'workflow': data['workflow'],
- 'nodes': nodes,
- 'mapping': mapping,
- 'node_mapping': node_mapping,
- 'workflow_mapping': workflow_mapping
- }))
- return force_unicode(xml.strip())
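- # Example (sketch, not part of the module): rendering a saved workflow to
- # XML, assuming `doc` is a Document2 instance and `user` a Django User:
- #   wf = Workflow(document=doc, user=user)
- #   xml = wf.to_xml(mapping={'output': '/user/hue/out'})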
- def get_absolute_url(self):
- return reverse('oozie:edit_workflow') + '?workflow=%s' % self.id
- def override_subworkflow_id(self, sub_wf_action, workflow_id):
- _data = self.get_data()
- action = [_action for _action in _data['workflow']['nodes'] if _action['id'] == sub_wf_action.id]
- if action:
- action[0]['properties']['job_properties'].append({'name': Workflow.HUE_ID, 'value': workflow_id})
- self.data = json.dumps(_data)
- def update_name(self, name):
- _data = self.get_data()
- _data['workflow']['name'] = name
- self.data = json.dumps(_data)
- def set_workspace(self, user):
- _data = json.loads(self.data)
- _data['workflow']['properties']['deployment_dir'] = Job.get_workspace(user)
- self.data = json.dumps(_data)
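- # Extracts one action into a minimal Start -> action -> End/Kill workflow;
- # the deployment directory is cleared (set to None) so the original
- # workspace is not reused for the single-action submission.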
- def create_single_action_workflow_data(self, node_id):
- _data = json.loads(self.data)
- start_node = [node for node in _data['workflow']['nodes'] if node['name'] == 'Start'][0]
- submit_node = [node for node in _data['workflow']['nodes'] if node['id'] == node_id][0]
- end_node = [node for node in _data['workflow']['nodes'] if node['name'] == 'End'][0]
- kill_node = [node for node in _data['workflow']['nodes'] if node['name'] == 'Kill'][0]
- # Modify children to point Start -> Submit_node -> End/Kill
- start_node['children'] = [{'to': submit_node['id']}]
- submit_node['children'] = [{'to': end_node['id']}, {'error': kill_node['id']}]
- _data['workflow']['properties']['deployment_dir'] = None
- # Recursively find the widget node
- def _get_node(rows, node_id):
- for row in rows:
- if not row['widgets']:
- for col in row['columns']:
- node = _get_node(col['rows'], node_id)
- if node:
- return node
- elif row['widgets'][0]['id'] == node_id:
- return row
- # Create wf data with above nodes
- return json.dumps({
- 'layout': [{
- "size": 12,
- "rows": [
- [row for row in _data['layout'][0]['rows'] if row['widgets'] and row['widgets'][0]['name'] == 'Start'][0],
- _get_node(_data['layout'][0]['rows'], node_id),
- [row for row in _data['layout'][0]['rows'] if row['widgets'] and row['widgets'][0]['name'] == 'End'][0],
- [row for row in _data['layout'][0]['rows'] if row['widgets'] and row['widgets'][0]['name'] == 'Kill'][0]
- ],
- "drops": ["temp"],
- "klass": "card card-home card-column span12"
- }],
- 'workflow': {
- "id": None,
- "uuid": None,
- "name": _data['workflow']['name'],
- "properties": _data['workflow']['properties'],
- "nodes": [start_node, submit_node, end_node, kill_node]
- }
- })
- # Updates node_list to lowercase names
- # To avoid case-sensitive failures
- def _to_lowercase(node_list):
- for node in node_list:
- for key in node.keys():
- if hasattr(node[key], 'lower'):
- node[key] = node[key].lower()
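- # Assigns each node a sequential id and a UUID. Start, End and the first
- # Kill node get the fixed UUIDs the JS editor expects; other nodes get a
- # UUID prefixed with the last 4 characters of their name. Oozie 'map-reduce'
- # nodes are split into Hue's 'mapreduce' and 'streaming' types.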
- def _update_adj_list(adj_list):
- uuids = {}
- id = 1
- first_kill_node_seen = False
- for node in adj_list.keys():
- adj_list[node]['id'] = id
- # Oozie uses the same action for streaming and mapreduce, but Hue manages them differently
- if adj_list[node]['node_type'] == 'map-reduce':
- if 'streaming' in adj_list[node]['name']:
- adj_list[node]['node_type'] = 'streaming'
- else:
- adj_list[node]['node_type'] = 'mapreduce'
- elif adj_list[node]['node_type'] == 'sub-workflow':
- adj_list[node]['node_type'] = 'subworkflow'
- if adj_list[node]['node_type'] == 'kill':
- # JS requires at least one of the kill nodes to have this ID
- if not first_kill_node_seen:
- adj_list[node]['uuid'] = '17c9c895-5a16-7443-bb81-f34b30b21548'
- first_kill_node_seen = True
- else:
- adj_list[node]['uuid'] = str(uuid.uuid4())
- elif adj_list[node]['node_type'] == 'start':
- adj_list[node]['uuid'] = '3f107997-04cc-8733-60a9-a4bb62cebffc'
- elif adj_list[node]['node_type'] == 'end':
- adj_list[node]['uuid'] = '33430f0f-ebfa-c3ec-f237-3e77efa03d0a'
- else:
- adj_list[node]['uuid'] = node[-4:] + str(uuid.uuid4())[4:]
- uuids[id] = adj_list[node]['uuid']
- id += 1
- return adj_list
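- # Recursively walks the node hierarchy and emits one widget-node dict per
- # action, pulling type-specific properties (script paths, commands, etc.)
- # out of the parsed XML node.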
- def _dig_nodes(nodes, adj_list, user, wf_nodes):
- for node in nodes:
- if type(node) != list:
- node = adj_list[node]
- properties = {}
- if '%s-widget' % node['node_type'] in NODES:
- properties = dict(NODES['%s-widget' % node['node_type']].get_fields())
- if node['node_type'] == 'pig':
- properties['script_path'] = node.get('pig').get('script_path')
- elif node['node_type'] == 'spark':
- properties['class'] = node.get('spark').get('class')
- properties['jars'] = node.get('spark').get('jar')
- elif node['node_type'] == 'hive' or node['node_type'] == 'hive2':
- properties['script_path'] = node.get('hive').get('script')
- elif node['node_type'] == 'java':
- properties['main_class'] = node.get('java').get('main-class')
- elif node['node_type'] == 'sqoop':
- properties['command'] = node.get('sqoop').get('command')
- elif node['node_type'] == 'mapreduce':
- properties['job_properties'] = node.get('job_properties')
- elif node['node_type'] == 'shell':
- properties['shell_command'] = node.get('shell').get('command')
- elif node['node_type'] == 'ssh':
- properties['user'] = '%s@%s' % (node.get('ssh').get('user'), node.get('ssh').get('host'))
- properties['ssh_command'] = node.get('ssh').get('command')
- elif node['node_type'] == 'fs':
- fs_props = node.get('fs')
- # TBD: gather props for different fs operations
- elif node['node_type'] == 'email':
- properties['to'] = node.get('email').get('to')
- properties['subject'] = node.get('email').get('subject')
- # TBD: body doesn't show up
- properties['body'] = node.get('email').get('body')
- elif node['node_type'] == 'streaming':
- properties['mapper'] = node.get('streaming').get('mapper')
- properties['reducer'] = node.get('streaming').get('reducer')
- elif node['node_type'] == 'distcp':
- properties['distcp_parameters'] = node.get('params')
- elif node['node_type'] == 'subworkflow':
- properties['app-path'] = node.get('subworkflow').get('app-path')
- properties['workflow'] = node.get('uuid')
- properties['job_properties'] = []
- properties['sla'] = ''
- children = []
- if node['node_type'] in ('fork', 'decision'):
- for key in node.keys():
- if key.startswith('path'):
- children.append({'to': adj_list[node[key]]['uuid'], 'condition': '${ 1 gt 0 }'})
- if node['node_type'] == 'decision':
- children.append({'to': adj_list[node['default']]['uuid'], 'condition': '${ 1 gt 0 }'})
- else:
- if node.get('ok_to'):
- children.append({'to': adj_list[node['ok_to']]['uuid']})
- if node.get('error_to'):
- children.append({'error': adj_list[node['error_to']]['uuid']})
- wf_nodes.append({
- "id": node['uuid'],
- "name": '%s-%s' % (node['node_type'].split('-')[0], node['uuid'][:4]),
- "type": "%s-widget" % node['node_type'],
- "properties": properties,
- "children": children
- })
- else:
- _dig_nodes(node, adj_list, user, wf_nodes)
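- # Converts the node hierarchy into the editor's row/column layout: plain
- # nodes become full-width rows, while fork/decision branches become a row of
- # columns whose width is the parent size divided by the number of branches.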
- def _create_workflow_layout(nodes, adj_list, size=12):
- wf_rows = []
- for node in nodes:
- if type(node) == list and len(node) == 1:
- node = node[0]
- if type(node) != list:
- wf_rows.append({"widgets":[{"size":size, "name": adj_list[node]['node_type'], "id": adj_list[node]['uuid'], "widgetType": "%s-widget" % adj_list[node]['node_type'], "properties":{}, "offset":0, "isLoading":False, "klass":"card card-widget span%s" % size, "columns":[]}]})
- else:
- if adj_list[node[0]]['node_type'] in ('fork', 'decision'):
- wf_rows.append({"widgets":[{"size":size, "name": adj_list[node[0]]['name'], "id": adj_list[node[0]]['uuid'], "widgetType": "%s-widget" % adj_list[node[0]]['node_type'], "properties":{}, "offset":0, "isLoading":False, "klass":"card card-widget span%s" % size, "columns":[]}]})
- wf_rows.append({
- "id": str(uuid.uuid4()),
- "widgets":[
- ],
- "columns":[
- {
- "id": str(uuid.uuid4()),
- "size": (size / len(node[1])),
- "rows":
- [{
- "id": str(uuid.uuid4()),
- "widgets": c['widgets'],
- "columns":c.get('columns') or []
- } for c in col],
- "klass":"card card-home card-column span%s" % (size / len(node[1]))
- }
- for col in [_create_workflow_layout(item, adj_list, size) for item in node[1]]
- ]
- })
- if adj_list[node[0]]['node_type'] == 'fork':
- wf_rows.append({"widgets":[{"size":size, "name": adj_list[node[2]]['name'], "id": adj_list[node[2]]['uuid'], "widgetType": "%s-widget" % adj_list[node[2]]['node_type'], "properties":{}, "offset":0, "isLoading":False, "klass":"card card-widget span%s" % size, "columns":[]}]})
- else:
- wf_rows.append(_create_workflow_layout(node, adj_list, size))
- return wf_rows
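- # Builds the nested node hierarchy from the adjacency list, starting at the
- # 'start' node, then appends the Kill and End nodes at the end.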
- def _get_hierarchy_from_adj_list(adj_list, curr_node, node_hierarchy):
- _get_hierarchy_from_adj_list_helper(adj_list, curr_node, node_hierarchy)
- # Add End and Kill nodes to node_hierarchy
- for key in adj_list.keys():
- if adj_list[key]['node_type'] == 'kill':
- node_hierarchy.append([adj_list[key]['name']])
- node_hierarchy.append([adj_list[key]['name'] for key in adj_list.keys() if adj_list[key]['node_type'] == 'end'])
- def _get_hierarchy_from_adj_list_helper(adj_list, curr_node, node_hierarchy):
- if not curr_node or adj_list[curr_node]['node_type'] in ('join', 'end', 'kill'):
- return curr_node
- elif adj_list[curr_node]['node_type'] in ('fork', 'decision'):
- branch_nodes = []
- branch_nodes.append(curr_node)
- join_node = None
- children = []
- for key in adj_list[curr_node].keys():
- if key.startswith('path'):
- child = []
- return_node = _get_hierarchy_from_adj_list_helper(adj_list, adj_list[curr_node][key], child)
- join_node = return_node if not join_node else join_node
- if child:
- children.append(child)
- branch_nodes.append(children)
- if adj_list[curr_node]['node_type'] == 'fork':
- branch_nodes.append(join_node)
- node_hierarchy.append(branch_nodes)
- return _get_hierarchy_from_adj_list_helper(adj_list, adj_list[join_node]['ok_to'], node_hierarchy)
- node_hierarchy.append(branch_nodes)
- return join_node
- else:
- node_hierarchy.append(curr_node)
- return _get_hierarchy_from_adj_list_helper(adj_list, adj_list[curr_node]['ok_to'], node_hierarchy)
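- # Maps node names to node dicts, with the start node stored under 'start'.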
- def _create_graph_adjacency_list(nodes):
- start_node = [node for node in nodes if node.get('node_type') == 'start'][0]
- adj_list = {'start': start_node}
- for node in nodes:
- if node and node.get('node_type') != 'start':
- adj_list[node['name']] = node
- return adj_list
- class Node(object):
- def __init__(self, data, user=None):
- self.data = data
- self.user = user
- self._augment_data()
- def to_xml(self, mapping=None, node_mapping=None, workflow_mapping=None):
- if mapping is None:
- mapping = {}
- if node_mapping is None:
- node_mapping = {}
- if workflow_mapping is None:
- workflow_mapping = {}
- if self.data['type'] in ('hive2', 'hive-document') and not self.data['properties']['jdbc_url']:
- self.data['properties']['jdbc_url'] = _get_hiveserver2_url()
- if self.data['type'] == 'fork':
- links = [link for link in self.data['children'] if link['to'] in node_mapping]
- if len(links) != len(self.data['children']):
- LOG.warn('Fork has child links that do not exist, ignoring them: kept %s of %s links; kept links: %s, original links: %s'
- % (len(links), len(self.data['children']), links, self.data['children']))
- self.data['children'] = links
- if self.data['type'] == JavaDocumentAction.TYPE:
- notebook = Notebook(document=Document2.objects.get_by_uuid(user=self.user, uuid=self.data['properties']['uuid']))
- properties = notebook.get_data()['snippets'][0]['properties']
- self.data['properties']['main_class'] = properties['class']
- self.data['properties']['app_jar'] = properties['app_jar'] # Not used here
- self.data['properties']['files'] = [{'value': f['path']} for f in properties['files']]
- self.data['properties']['arguments'] = [{'value': prop} for prop in properties['arguments']]
- data = {
- 'node': self.data,
- 'mapping': mapping,
- 'node_mapping': node_mapping,
- 'workflow_mapping': workflow_mapping
- }
- return django_mako.render_to_string(self.get_template_name(), data)
- @property
- def id(self):
- return self.data['id']
- @property
- def name(self):
- return self.data['name']
- @property
- def sla_enabled(self):
- return 'sla' in self.data['properties'] and self.data['properties']['sla'] and self.data['properties']['sla'][0].get('value')
- def _augment_data(self):
- self.data['type'] = self.data['type'].replace('-widget', '')
- self.data['uuid'] = self.data['id']
- # Action Node
- if 'credentials' not in self.data['properties']:
- self.data['properties']['credentials'] = []
- if 'prepares' not in self.data['properties']:
- self.data['properties']['prepares'] = []
- if 'job_xml' not in self.data['properties']:
- self.data['properties']['job_xml'] = []
- if 'properties' not in self.data['properties']:
- self.data['properties']['properties'] = []
- if 'params' not in self.data['properties']:
- self.data['properties']['params'] = []
- if 'files' not in self.data['properties']:
- self.data['properties']['files'] = []
- if 'archives' not in self.data['properties']:
- self.data['properties']['archives'] = []
- if 'sla' not in self.data['properties']:
- self.data['properties']['sla'] = WorkflowConfiguration.SLA_DEFAULT
- if 'retry_max' not in self.data['properties']:
- self.data['properties']['retry_max'] = []
- if 'retry_interval' not in self.data['properties']:
- self.data['properties']['retry_interval'] = []
- # Backward compatibility
- _upgrade_older_node(self.data)
- def get_template_name(self):
- node_type = self.data['type']
- if self.data['type'] == JavaDocumentAction.TYPE:
- node_type = JavaAction.TYPE
- return 'editor2/gen/workflow-%s.xml.mako' % node_type
- def find_parameters(self):
- return find_parameters(self) + (find_parameters(self, ['sla']) if self.sla_enabled else [])
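- # Backfills properties missing from nodes saved by older Hue versions so
- # newer editor code can render them without KeyErrors.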
- def _upgrade_older_node(node):
- if node['type'] in ('sqoop', 'sqoop-widget') and 'arguments' not in node['properties']:
- node['properties']['arguments'] = node['properties']['parameters']
- if node['type'] in ('kill', 'kill-widget') and 'to' not in node['properties']:
- node['properties']['enableMail'] = False
- node['properties']['to'] = ''
- node['properties']['cc'] = ''
- node['properties']['subject'] = ''
- node['properties']['body'] = ''
- if node['type'] == 'email-widget' and 'bcc' not in node['properties']:
- node['properties']['bcc'] = ''
- node['properties']['content_type'] = 'text/plain'
- node['properties']['attachment'] = ''
- if node['type'] == 'spark-widget' and 'files' not in node['properties']:
- node['properties']['files'] = []
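- # Base class for the editor action catalogue: each subclass declares a TYPE
- # and a FIELDS dict; get_fields() flattens FIELDS into (name, default value)
- # pairs plus the shared 'sla' and 'credentials' entries.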
- class Action(object):
- @classmethod
- def get_fields(cls):
- credentials = [cls.DEFAULT_CREDENTIALS] if hasattr(cls, 'DEFAULT_CREDENTIALS') else []
- return [(f['name'], f['value']) for f in cls.FIELDS.itervalues()] + [('sla', WorkflowConfiguration.SLA_DEFAULT), ('credentials', credentials)]
- class StartNode(Action):
- TYPE = 'start'
- FIELDS = {}
- class EndNode(Action):
- TYPE = 'end'
- FIELDS = {}
- class PigAction(Action):
- TYPE = 'pig'
- FIELDS = {
- 'script_path': {
- 'name': 'script_path',
- 'label': _('Script'),
- 'value': '',
- 'help_text': _('Path to the script on HDFS.'),
- 'type': ''
- },
- 'parameters': {
- 'name': 'parameters',
- 'label': _('Parameters'),
- 'value': [],
- 'help_text': _('The Pig parameters of the script without -param, e.g. INPUT=${inputDir}'),
- 'type': ''
- },
- 'arguments': {
- 'name': 'arguments',
- 'label': _('Arguments'),
- 'value': [],
- 'help_text': _('The Pig parameters of the script as is, e.g. -param, INPUT=${inputDir}'),
- 'type': ''
- },
- # Common
- 'files': {
- 'name': 'files',
- 'label': _('Files'),
- 'value': [],
- 'help_text': _('Files put in the running directory.'),
- 'type': ''
- },
- 'archives': {
- 'name': 'archives',
- 'label': _('Archives'),
- 'value': [],
- 'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
- 'type': ''
- },
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('value, e.g. production'),
- 'type': ''
- },
- 'prepares': {
- 'name': 'prepares',
- 'label': _('Prepares'),
- 'value': [],
- 'help_text': _('Path to manipulate before starting the application.'),
- 'type': ''
- },
- 'job_xml': {
- 'name': 'job_xml',
- 'label': _('Job XML'),
- 'value': [],
- 'help_text': _('Refer to a Hadoop JobConf job.xml'),
- 'type': ''
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['script_path']]
- class JavaAction(Action):
- TYPE = 'java'
- FIELDS = {
- 'jar_path': {
- 'name': 'jar_path',
- 'label': _('Jar name'),
- 'value': '',
- 'help_text': _('Path to the jar on HDFS.'),
- 'type': ''
- },
- 'main_class': {
- 'name': 'main_class',
- 'label': _('Main class'),
- 'value': '',
- 'help_text': _('Java class. e.g. org.apache.hadoop.examples.Grep'),
- 'type': 'text'
- },
- 'arguments': {
- 'name': 'arguments',
- 'label': _('Arguments'),
- 'value': [],
- 'help_text': _('Arguments of the main method. The value of each arg element is considered a single argument '
- 'and they are passed to the main method in the same order.'),
- 'type': ''
- },
- 'java_opts': {
- 'name': 'java_opts',
- 'label': _('Java options'),
- 'value': [],
- 'help_text': _('Parameters for the JVM, e.g. -Dprop1=a -Dprop2=b'),
- 'type': ''
- },
- 'capture_output': {
- 'name': 'capture_output',
- 'label': _('Capture output'),
- 'value': False,
- 'help_text': _('Capture output of the stdout of the %(program)s command execution. The %(program)s '
- 'command output must be in Java Properties file format and it must not exceed 2KB. '
- 'From within the workflow definition, the output of an %(program)s action node is accessible '
- 'via the String action:output(String node, String key) function') % {'program': TYPE.title()},
- 'type': ''
- },
- # Common
- 'files': {
- 'name': 'files',
- 'label': _('Files'),
- 'value': [],
- 'help_text': _('Files put in the running directory.'),
- 'type': ''
- },
- 'archives': {
- 'name': 'archives',
- 'label': _('Archives'),
- 'value': [],
- 'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
- 'type': ''
- },
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('value, e.g. production'),
- 'type': ''
- },
- 'prepares': {
- 'name': 'prepares',
- 'label': _('Prepares'),
- 'value': [],
- 'help_text': _('Path to manipulate before starting the application.'),
- 'type': ''
- },
- 'job_xml': {
- 'name': 'job_xml',
- 'label': _('Job XML'),
- 'value': [],
- 'help_text': _('Refer to a Hadoop JobConf job.xml'),
- 'type': ''
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['jar_path'], cls.FIELDS['main_class']]
- class HiveAction(Action):
- TYPE = 'hive'
- DEFAULT_CREDENTIALS = 'hcat'
- FIELDS = {
- 'script_path': {
- 'name': 'script_path',
- 'label': _('Script'),
- 'value': '',
- 'help_text': _('Path to the script on HDFS.'),
- 'type': ''
- },
- 'parameters': {
- 'name': 'parameters',
- 'label': _('Parameters'),
- 'value': [],
- 'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
- 'type': ''
- },
- # Common
- 'files': {
- 'name': 'files',
- 'label': _('Files'),
- 'value': [],
- 'help_text': _('Files put in the running directory.'),
- 'type': ''
- },
- 'archives': {
- 'name': 'archives',
- 'label': _('Archives'),
- 'value': [],
- 'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
- 'type': ''
- },
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('value, e.g. production'),
- 'type': ''
- },
- 'prepares': {
- 'name': 'prepares',
- 'label': _('Prepares'),
- 'value': [],
- 'help_text': _('Path to manipulate before starting the application.'),
- 'type': ''
- },
- 'hive_xml': {
- 'name': 'hive_xml',
- 'label': _('Hive XML'),
- 'value': '',
- 'help_text': _('Refer to a hive-site.xml for connecting to Hive'),
- 'type': ''
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['script_path'], cls.FIELDS['hive_xml']]
- def _get_hiveserver2_url():
- try:
- from beeswax.hive_site import hiveserver2_jdbc_url
- return hiveserver2_jdbc_url()
- except Exception as e:
- # Might fail if Hive is disabled
- LOG.warn('Could not guess HiveServer2 URL: %s' % smart_str(e))
- return 'jdbc:hive2://localhost:10000/default'
- class HiveServer2Action(Action):
- TYPE = 'hive2'
- DEFAULT_CREDENTIALS = 'hive2'
- FIELDS = {
- 'script_path': {
- 'name': 'script_path',
- 'label': _('Script'),
- 'value': '',
- 'help_text': _('Path to the script on HDFS.'),
- 'type': ''
- },
- 'parameters': {
- 'name': 'parameters',
- 'label': _('Parameters'),
- 'value': [],
- 'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
- 'type': ''
- },
- # Common
- 'jdbc_url': {
- 'name': 'jdbc_url',
- 'label': _('HiveServer2 URL'),
- 'value': "",
- 'help_text': _('e.g. jdbc:hive2://localhost:10000/default. JDBC URL for the Hive Server 2.'),
- 'type': ''
- },
- 'password': {
- 'name': 'password',
- 'label': _('Password'),
- 'value': '',
- 'help_text': _('The password element must contain the password of the current user. However, the password is only used if Hive Server 2 is backed by '
- 'something requiring a password (e.g. LDAP); non-secured Hive Server 2 or Kerberized Hive Server 2 don\'t require a password.'),
- 'type': ''
- },
- 'files': {
- 'name': 'files',
- 'label': _('Files'),
- 'value': [],
- 'help_text': _('Files put in the running directory.'),
- 'type': ''
- },
- 'archives': {
- 'name': 'archives',
- 'label': _('Archives'),
- 'value': [],
- 'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
- 'type': ''
- },
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('value, e.g. production'),
- 'type': ''
- },
- 'prepares': {
- 'name': 'prepares',
- 'label': _('Prepares'),
- 'value': [],
- 'help_text': _('Path to manipulate before starting the application.'),
- 'type': ''
- },
- 'job_xml': {
- 'name': 'job_xml',
- 'label': _('Job XML'),
- 'value': '',
- 'help_text': _('Refer to a Hadoop JobConf job.xml'),
- 'type': ''
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['script_path']]
- class SubWorkflowAction(Action):
- TYPE = 'subworkflow'
- FIELDS = {
- 'workflow': {
- 'name': 'workflow',
- 'label': _('Sub-workflow'),
- 'value': None,
- 'help_text': _('The sub-workflow application to include. You must own all the sub-workflows'),
- 'type': 'workflow'
- },
- 'propagate_configuration': {
- 'name': 'propagate_configuration',
- 'label': _('Propagate configuration'),
- 'value': True,
- 'help_text': _('If the workflow job configuration should be propagated to the child workflow.'),
- 'type': ''
- },
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('Can be used to specify the job properties that are required to run the child workflow job.'),
- 'type': ''
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['workflow']]
- class SqoopAction(Action):
- TYPE = 'sqoop'
- FIELDS = {
- 'command': {
- 'name': 'command',
- 'label': _('Sqoop command'),
- 'value': 'import --connect jdbc:hsqldb:file:db.hsqldb --table TT --target-dir hdfs://localhost:8020/user/foo -m 1',
- 'help_text': _('The full %(type)s command. Either put it here or split it by spaces and insert the parts as multiple parameters below.') % {'type': TYPE},
- 'type': 'textarea'
- },
- 'arguments': {
- 'name': 'arguments',
- 'label': _('Arguments'),
- 'value': [],
- 'help_text': _('If no command is specified, split the command by spaces and insert the %(type)s parameters '
- 'here e.g. import, --connect, jdbc:hsqldb:file:db.hsqldb, ...') % {'type': TYPE},
- 'type': ''
- },
- # Common
- 'files': {
- 'name': 'files',
- 'label': _('Files'),
- 'value': [],
- 'help_text': _('Files put in the running directory.'),
- 'type': ''
- },
- 'archives': {
- 'name': 'archives',
- 'label': _('Archives'),
- 'value': [],
- 'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
- 'type': ''
- },
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('value, e.g. production'),
- 'type': ''
- },
- 'prepares': {
- 'name': 'prepares',
- 'label': _('Prepares'),
- 'value': [],
- 'help_text': _('Path to manipulate before starting the application.'),
- 'type': ''
- },
- 'job_xml': {
- 'name': 'job_xml',
- 'label': _('Job XML'),
- 'value': '',
- 'help_text': _('Refer to a Hadoop JobConf job.xml'),
- 'type': ''
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['command']]
- class MapReduceAction(Action):
- TYPE = 'mapreduce'
- FIELDS = {
- 'jar_path': {
- 'name': 'jar_path',
- 'label': _('Jar name'),
- 'value': '',
- 'help_text': _('Path to the jar on HDFS.'),
- 'type': ''
- },
- # Common
- 'files': {
- 'name': 'files',
- 'label': _('Files'),
- 'value': [],
- 'help_text': _('Files put in the running directory.'),
- 'type': ''
- },
- 'archives': {
- 'name': 'archives',
- 'label': _('Archives'),
- 'value': [],
- 'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
- 'type': ''
- },
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('value, e.g. production'),
- 'type': ''
- },
- 'prepares': {
- 'name': 'prepares',
- 'label': _('Prepares'),
- 'value': [],
- 'help_text': _('Path to manipulate before starting the application.'),
- 'type': ''
- },
- 'job_xml': {
- 'name': 'job_xml',
- 'label': _('Job XML'),
- 'value': '',
- 'help_text': _('Refer to a Hadoop JobConf job.xml'),
- 'type': ''
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['jar_path']]
- class ShellAction(Action):
- TYPE = 'shell'
- FIELDS = {
- 'shell_command': {
- 'name': 'shell_command',
- 'label': _('Shell command'),
- 'value': '',
- 'help_text': _('Shell command to execute, e.g. script.sh'),
- 'type': ''
- },
- 'arguments': {
- 'name': 'arguments',
- 'label': _('Arguments'),
- 'value': [],
- 'help_text': _('One arg, e.g. -l, --help'),
- 'type': ''
- },
- 'env_var': {
- 'name': 'env_var',
- 'label': _('Environment variables'),
- 'value': [],
- 'help_text': _('e.g. MAX=10 or PATH=$PATH:mypath'),
- 'type': ''
- },
- 'capture_output': {
- 'name': 'capture_output',
- 'label': _('Capture output'),
- 'value': True,
- 'help_text': _('Capture output of the stdout of the %(program)s command execution. The %(program)s '
- 'command output must be in Java Properties file format and it must not exceed 2KB. '
- 'From within the workflow definition, the output of an %(program)s action node is accessible '
- 'via the String action:output(String node, String key) function') % {'program': TYPE},
- 'type': ''
- },
- # Common
- 'files': {
- 'name': 'files',
- 'label': _('Files'),
- 'value': [],
- 'help_text': _('Files put in the running directory.'),
- 'type': ''
- },
- 'archives': {
- 'name': 'archives',
- 'label': _('Archives'),
- 'value': [],
- 'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
- 'type': ''
- },
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('value, e.g. production'),
- 'type': ''
- },
- 'prepares': {
- 'name': 'prepares',
- 'label': _('Prepares'),
- 'value': [],
- 'help_text': _('Path to manipulate before starting the application.'),
- 'type': ''
- },
- 'job_xml': {
- 'name': 'job_xml',
- 'label': _('Job XML'),
- 'value': '',
- 'help_text': _('Refer to a Hadoop JobConf job.xml'),
- 'type': ''
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['shell_command']]
- class SshAction(Action):
- TYPE = 'ssh'
- FIELDS = {
- 'host': {
- 'name': 'host',
- 'label': _('User and Host'),
- 'value': 'user@host.com',
- 'help_text': _('Where the shell will be executed.'),
- 'type': 'text'
- },
- 'ssh_command': {
- 'name': 'ssh_command',
- 'label': _('SSH command'),
- 'value': 'ls',
- 'help_text': _('The path of the Shell command to execute.'),
- 'type': 'textarea'
- },
- 'arguments': {
- 'name': 'arguments',
- 'label': _('Arguments'),
- 'value': [],
- 'help_text': _('One arg, e.g. -l, --help'),
- 'type': ''
- },
- 'capture_output': {
- 'name': 'capture_output',
- 'label': _('Capture output'),
- 'value': True,
- 'help_text': _('Capture output of the stdout of the %(program)s command execution. The %(program)s '
- 'command output must be in Java Properties file format and it must not exceed 2KB. '
- 'From within the workflow definition, the output of an %(program)s action node is accessible '
- 'via the String action:output(String node, String key) function') % {'program': TYPE},
- 'type': ''
- },
- # Common
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['host'], cls.FIELDS['ssh_command']]
- class FsAction(Action):
- TYPE = 'fs'
- FIELDS = {
- 'deletes': {
- 'name': 'deletes',
- 'label': _('Delete path'),
- 'value': [],
- 'help_text': _('Deletes recursively all content.'),
- 'type': ''
- },
- 'mkdirs': {
- 'name': 'mkdirs',
- 'label': _('Create directory'),
- 'value': [],
- 'help_text': _('Sub directories are created if needed.'),
- 'type': ''
- },
- 'moves': {
- 'name': 'moves',
- 'label': _('Move file or directory'),
- 'value': [],
- 'help_text': _('Destination.'),
- 'type': ''
- },
- 'chmods': {
- 'name': 'chmods',
- 'label': _('Change permissions'),
- 'value': [],
- 'help_text': _('File or directory.'),
- 'type': ''
- },
- 'touchzs': {
- 'name': 'touchzs',
- 'label': _('Create or touch a file'),
- 'value': [],
- 'help_text': _('Or update its modification date.'),
- 'type': ''
- },
- 'chgrps': {
- 'name': 'chgrps',
- 'label': _('Change the group'),
- 'value': [],
- 'help_text': _('File or directory.'),
- 'type': ''
- },
- # Common
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return []
- class EmailAction(Action):
- TYPE = 'email'
- FIELDS = {
- 'to': {
- 'name': 'to',
- 'label': _('To addresses'),
- 'value': '',
- 'help_text': _('Comma-separated values'),
- 'type': 'text'
- },
- 'cc': {
- 'name': 'cc',
- 'label': _('cc'),
- 'value': '',
- 'help_text': _('Comma-separated values'),
- 'type': 'text'
- },
- 'bcc': {
- 'name': 'bcc',
- 'label': _('bcc'),
- 'value': '',
- 'help_text': _('Comma-separated values'),
- 'type': 'text'
- },
- 'subject': {
- 'name': 'subject',
- 'label': _('Subject'),
- 'value': '',
- 'help_text': _('Plain-text'),
- 'type': 'text'
- },
- 'body': {
- 'name': 'body',
- 'label': _('Body'),
- 'value': '',
- 'help_text': _('Plain-text'),
- 'type': 'textarea'
- },
- 'attachment': {
- 'name': 'attachment',
- 'label': _('Attachment'),
- 'value': '',
- 'help_text': _('Comma separated list of HDFS files.'),
- 'type': ''
- },
- 'content_type': {
- 'name': 'content_type',
- 'label': _('Content-type'),
- 'value': 'text/plain',
- 'help_text': _('Default is text/plain'),
- 'type': 'text'
- },
- # Common
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['to'], cls.FIELDS['subject'], cls.FIELDS['body']]
- class StreamingAction(Action):
- TYPE = 'streaming'
- FIELDS = {
- 'mapper': {
- 'name': 'mapper',
- 'label': _('Mapper'),
- 'value': '',
- 'help_text': _('The executable/script to be used as mapper.'),
- 'type': ''
- },
- 'reducer': {
- 'name': 'reducer',
- 'label': _('Reducer'),
- 'value': '',
- 'help_text': _('The executable/script to be used as reducer.'),
- 'type': ''
- },
- # Common
- 'files': {
- 'name': 'files',
- 'label': _('Files'),
- 'value': [],
- 'help_text': _('Files put in the running directory.')
- },
- 'archives': {
- 'name': 'archives',
- 'label': _('Archives'),
- 'value': [],
- 'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.')
- },
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('value, e.g. production')
- },
- 'prepares': {
- 'name': 'prepares',
- 'label': _('Prepares'),
- 'value': [],
- 'help_text': _('Path to manipulate before starting the application.')
- },
- 'job_xml': {
- 'name': 'job_xml',
- 'label': _('Job XML'),
- 'value': '',
- 'help_text': _('Refer to a Hadoop JobConf job.xml')
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['mapper'], cls.FIELDS['reducer']]
- class DistCpAction(Action):
- TYPE = 'distcp'
- FIELDS = {
- 'distcp_parameters': {
- 'name': 'distcp_parameters',
- 'label': _('Arguments'),
- 'value': [{'value': ''}, {'value': ''}],
- 'help_text': _('Options first, then source / destination paths'),
- 'type': 'distcp'
- },
- # Common
- 'prepares': {
- 'name': 'prepares',
- 'label': _('Prepares'),
- 'value': [],
- 'help_text': _('Path to manipulate before starting the application.')
- },
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('value, e.g. production')
- },
- 'java_opts': {
- 'name': 'java_opts',
- 'label': _('Java options'),
- 'value': '',
- 'help_text': _('Parameters for the JVM, e.g. -Dprop1=a -Dprop2=b')
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['distcp_parameters']]
- class SparkAction(Action):
- TYPE = 'spark'
- FIELDS = {
- 'spark_master': {
- 'name': 'spark_master',
- 'label': _('Spark Master'),
- 'value': 'local[*]',
- 'help_text': _('Ex: spark://host:port, mesos://host:port, yarn, or local.'),
- 'type': ''
- },
- 'mode': {
- 'name': 'mode',
- 'label': _('Mode'),
- 'value': 'client',
- 'help_text': _('e.g. client, cluster'),
- 'type': ''
- },
- 'app_name': {
- 'name': 'app_name',
- 'label': _('App name'),
- 'value': 'MySpark',
- 'help_text': _('The name of the spark application'),
- 'type': ''
- },
- 'files': {
- 'name': 'files',
- 'label': _('Files'),
- 'value': [],
- 'help_text': _('Files put in the running directory.'),
- 'type': ''
- },
- 'class': {
- 'name': 'class',
- 'label': _('Main class'),
- 'value': '',
- 'help_text': _("e.g. org.apache.spark.examples.mllib.JavaALS."),
- 'type': 'text'
- },
- 'jars': {
- 'name': 'jars',
- 'label': _('Jars/py files'),
- 'value': '',
- 'help_text': _('Comma separated list of jars or python HDFS files.'),
- 'type': ''
- },
- 'spark_opts': {
- 'name': 'spark_opts',
- 'label': _('Options list'),
- 'value': '',
- 'help_text': _('Ex: --executor-memory 20G --num-executors 50'),
- 'type': ''
- },
- 'spark_arguments': {
- 'name': 'spark_arguments',
- 'label': _('Arguments'),
- 'value': [],
- 'help_text': _('Arguments, one by one, e.g. 1000, /path/a.')
- },
- # Common
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('value, e.g. production')
- },
- 'prepares': {
- 'name': 'prepares',
- 'label': _('Prepares'),
- 'value': [],
- 'help_text': _('Path to manipulate before starting the application.')
- },
- 'job_xml': {
- 'name': 'job_xml',
- 'label': _('Job XML'),
- 'value': '',
- 'help_text': _('Refer to a Hadoop JobConf job.xml'),
- 'type': ''
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['spark_master'], cls.FIELDS['mode'], cls.FIELDS['jars']]
- class KillAction(Action):
- TYPE = 'kill'
- FIELDS = {
- 'message': {
- 'name': 'message',
- 'label': _('Message'),
- 'value': _('Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]'),
- 'help_text': _('Message to display when the workflow fails. Can contain some EL functions.'),
- 'type': 'textarea'
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['message']]
- class JoinAction(Action):
- TYPE = 'join'
- FIELDS = {}
- @classmethod
- def get_mandatory_fields(cls):
- return []
- class GenericAction(Action):
- TYPE = 'generic'
- FIELDS = {
- 'xml': {
- 'name': 'xml',
- 'label': _('XML of the action'),
- 'value': '<my_action>\n</my_action>',
- 'help_text': _('Insert verbatim the XML of the action to insert into the workflow.'),
- 'type': 'textarea'
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['xml']]
- class ForkNode(Action):
- TYPE = 'fork'
- FIELDS = {}
- @classmethod
- def get_mandatory_fields(cls):
- return []
- class HiveDocumentAction(Action):
- TYPE = 'hive-document'
- DEFAULT_CREDENTIALS = 'hive2'
- FIELDS = {
- 'uuid': {
- 'name': 'uuid',
- 'label': _('Hive query'),
- 'value': '',
- 'help_text': _('Select a saved Hive query you want to schedule.'),
- 'type': 'hive'
- },
- 'parameters': {
- 'name': 'parameters',
- 'label': _('Parameters'),
- 'value': [],
- 'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
- 'type': ''
- },
- # Common
- 'jdbc_url': {
- 'name': 'jdbc_url',
- 'label': _('HiveServer2 URL'),
- 'value': "",
- 'help_text': _('e.g. jdbc:hive2://localhost:10000/default. JDBC URL for the Hive Server 2.'),
- 'type': ''
- },
- 'password': {
- 'name': 'password',
- 'label': _('Password'),
- 'value': '',
- 'help_text': _('The password element must contain the password of the current user. However, the password is only used if Hive Server 2 is backed by '
- 'something requiring a password (e.g. LDAP); non-secured Hive Server 2 or Kerberized Hive Server 2 don\'t require a password.'),
- 'type': ''
- },
- 'files': {
- 'name': 'files',
- 'label': _('Files'),
- 'value': [],
- 'help_text': _('Files put in the running directory.'),
- 'type': ''
- },
- 'archives': {
- 'name': 'archives',
- 'label': _('Archives'),
- 'value': [],
- 'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
- 'type': ''
- },
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('value, e.g. production'),
- 'type': ''
- },
- 'prepares': {
- 'name': 'prepares',
- 'label': _('Prepares'),
- 'value': [],
- 'help_text': _('Path to manipulate before starting the application.'),
- 'type': ''
- },
- 'job_xml': {
- 'name': 'job_xml',
- 'label': _('Job XML'),
- 'value': '',
- 'help_text': _('Refer to a Hadoop JobConf job.xml'),
- 'type': ''
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['uuid']]
- class JavaDocumentAction(Action):
- TYPE = 'java-document'
- FIELDS = {
- 'uuid': {
- 'name': 'uuid',
- 'label': _('Java program'),
- 'value': '',
- 'help_text': _('Select a saved Java program you want to schedule.'),
- 'type': 'java'
- },
- 'arguments': {
- 'name': 'arguments',
- 'label': _('Arguments'),
- 'value': [],
- 'help_text': _('Arguments of the main method. The value of each arg element is considered a single argument '
- 'and they are passed to the main method in the same order.'),
- 'type': ''
- },
- 'java_opts': {
- 'name': 'java_opts',
- 'label': _('Java options'),
- 'value': [],
- 'help_text': _('Parameters for the JVM, e.g. -Dprop1=a -Dprop2=b'),
- 'type': ''
- },
- 'capture_output': {
- 'name': 'capture_output',
- 'label': _('Capture output'),
- 'value': False,
- 'help_text': _('Capture the stdout of the %(program)s command execution. The %(program)s '
- 'command output must be in Java Properties file format and must not exceed 2KB. '
- 'From within the workflow definition, the output of a %(program)s action node is accessible '
- 'via the String action:output(String node, String key) function') % {'program': TYPE.title()},
- 'type': ''
- },
- # Common
- 'files': {
- 'name': 'files',
- 'label': _('Files'),
- 'value': [],
- 'help_text': _('Files put in the running directory.'),
- 'type': ''
- },
- 'archives': {
- 'name': 'archives',
- 'label': _('Archives'),
- 'value': [],
- 'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
- 'type': ''
- },
- 'job_properties': {
- 'name': 'job_properties',
- 'label': _('Hadoop job properties'),
- 'value': [],
- 'help_text': _('value, e.g. production'),
- 'type': ''
- },
- 'prepares': {
- 'name': 'prepares',
- 'label': _('Prepares'),
- 'value': [],
- 'help_text': _('Path to manipulate before starting the application.'),
- 'type': ''
- },
- 'job_xml': {
- 'name': 'job_xml',
- 'label': _('Job XML'),
- 'value': [],
- 'help_text': _('Refer to a Hadoop JobConf job.xml'),
- 'type': ''
- },
- 'retry_max': {
- 'name': 'retry_max',
- 'label': _('Max retry'),
- 'value': [],
- 'help_text': _('Number of times, default is 3'),
- 'type': ''
- },
- 'retry_interval': {
- 'name': 'retry_interval',
- 'label': _('Retry interval'),
- 'value': [],
- 'help_text': _('Wait time in minutes, default is 10'),
- 'type': ''
- }
- }
- @classmethod
- def get_mandatory_fields(cls):
- return [cls.FIELDS['uuid']]
- class DecisionNode(Action):
- TYPE = 'decision'
- FIELDS = {}
- @classmethod
- def get_mandatory_fields(cls):
- return []
- NODES = {
- 'start-widget': StartNode,
- 'end-widget': EndNode,
- 'pig-widget': PigAction,
- 'java-widget': JavaAction,
- 'hive-widget': HiveAction,
- 'hive2-widget': HiveServer2Action,
- 'sqoop-widget': SqoopAction,
- 'mapreduce-widget': MapReduceAction,
- 'subworkflow-widget': SubWorkflowAction,
- 'shell-widget': ShellAction,
- 'ssh-widget': SshAction,
- 'fs-widget': FsAction,
- 'email-widget': EmailAction,
- 'streaming-widget': StreamingAction,
- 'distcp-widget': DistCpAction,
- 'kill-widget': KillAction,
- 'join-widget': JoinAction,
- 'fork-widget': ForkNode,
- 'decision-widget': DecisionNode,
- 'spark-widget': SparkAction,
- 'generic-widget': GenericAction,
- 'hive-document-widget': HiveDocumentAction,
- 'java-document-widget': JavaDocumentAction
- }
- WORKFLOW_NODE_PROPERTIES = {}
- for node in NODES.itervalues():
- WORKFLOW_NODE_PROPERTIES.update(node.FIELDS)
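- # Illustrative lookups against the registry above (doctest-style, assuming only
- # the classes defined in this module):
- #   >>> NODES['ssh-widget'].get_mandatory_fields()[0]['name']
- #   'host'
- #   >>> WORKFLOW_NODE_PROPERTIES['retry_max']['value']
- #   []
- # When several node classes define the same field name, the last class iterated
- # wins in WORKFLOW_NODE_PROPERTIES; the shared fields ('files', 'retry_max', ...)
- # are declared near-identically across classes, so the merge is effectively harmless.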
- def find_parameters(instance, fields=None):
- """Find parameters in the given fields"""
- if fields is None:
- fields = NODES['%s-widget' % instance.data['type']].FIELDS.keys()
- params = []
- for field in fields:
- data = instance.data['properties'][field]
- if field == 'sla' and not instance.sla_enabled:
- continue
- if isinstance(data, list):
- params.extend(find_json_parameters(data))
- elif isinstance(data, basestring):
- for match in Template.pattern.finditer(data):
- name = match.group('braced')
- if name is not None:
- params.append(name)
- return params
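- # Sketch of a call (hypothetical 'node' bound to a pig-widget instance); only the
- # listed fields are scanned, so callers can restrict extraction to specific fields:
- #   node.data['properties']['script_path'] = '${wf_dir}/run.pig'
- #   find_parameters(node, fields=['script_path'])  # -> ['wf_dir']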
- def find_json_parameters(fields):
- # Input is list of json dict
- params = []
- for field in fields:
- for data in field.values():
- if isinstance(data, basestring):
- for match in Template.pattern.finditer(data):
- name = match.group('braced')
- if name is not None:
- params.append(name)
- return params
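- # Example (illustrative), relying on string.Template's '${...}' braced group:
- #   >>> find_json_parameters([{'value': 'INPUT=${inputDir}'}, {'value': 'N=5'}])
- #   ['inputDir']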
- def find_dollar_variables(text):
- return re.findall('[^\n\\\\]\$([^\{ \'\"\-;\(\)]+)', text, re.MULTILINE)
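- # Example (illustrative): only bare $name occurrences count; ${name}, escaped
- # \$name and a '$' at the very start of the text are skipped by the regex.
- #   >>> find_dollar_variables("echo $var1 ${var2}")
- #   ['var1']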
- def find_dollar_braced_variables(text):
- vars = set()
- for var in re.findall('\$\{([A-Za-z0-9:_-]+)\}', text, re.MULTILINE):
- if ':' in var:
- var = var.split(':', 1)[1]
- vars.add(var)
- return list(vars)
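- # Example (illustrative): the 'prefix:' part of EL-style names is stripped, and
- # duplicates collapse since a set is used (hence sorted() here for a stable view):
- #   >>> sorted(find_dollar_braced_variables('${start_date} ${end_date} ${wf:id}'))
- #   ['end_date', 'id', 'start_date']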
- def import_workflow_from_hue_3_7(old_wf):
- """
- Example of data to transform
- [<Start: start>, <Pig: Pig>, [<Kill: kill>], [<End: end>]]
- [<Start: start>, <Java: TeraGenWorkflow>, <Java: TeraSort>, [<Kill: kill>], [<End: end>]]
- [<Start: start>, [<Fork: fork-34>, [[<Mapreduce: Sleep-1>, <Mapreduce: Sleep-10>], [<Mapreduce: Sleep-5>, [<Fork: fork-38>, [[<Mapreduce: Sleep-3>], [<Mapreduce: Sleep-4>]], <Join: join-39>]]], <Join: join-35>], [<Kill: kill>], [<End: end>]]
- """
- uuids = {}
- old_nodes = old_wf.get_hierarchy()
- wf = Workflow()
- wf_rows = []
- wf_nodes = []
- data = wf.get_data()
- # UUIDs node mapping
- for node in old_wf.node_list:
- if node.name == 'kill':
- node_uuid = '17c9c895-5a16-7443-bb81-f34b30b21548'
- elif node.name == 'start':
- node_uuid = '3f107997-04cc-8733-60a9-a4bb62cebffc'
- elif node.name == 'end':
- node_uuid = '33430f0f-ebfa-c3ec-f237-3e77efa03d0a'
- else:
- node_uuid = str(uuid.uuid4())
- uuids[node.id] = node_uuid
- # Workflow
- data['workflow']['uuid'] = str(uuid.uuid4())
- data['workflow']['name'] = old_wf.name
- data['workflow']['properties']['properties'] = json.loads(old_wf.job_properties)
- data['workflow']['properties']['job_xml'] = old_wf.job_xml
- data['workflow']['properties']['description'] = old_wf.description
- data['workflow']['properties']['schema_version'] = old_wf.schema_version
- data['workflow']['properties']['deployment_dir'] = old_wf.deployment_dir
- data['workflow']['properties']['parameters'] = json.loads(old_wf.parameters)
- data['workflow']['properties']['sla'] = old_wf.sla
- data['workflow']['properties']['sla_enabled'] = old_wf.sla_enabled
- data['workflow']['properties']['imported'] = True
- data['workflow']['properties']['wf1_id'] = old_wf.id
- # Layout
- rows = data['layout'][0]['rows']
- def _create_layout(nodes, size=12):
- wf_rows = []
- for node in nodes:
- if type(node) == list and len(node) == 1:
- node = node[0]
- if type(node) != list:
- wf_rows.append({"widgets":[{"size":size, "name": node.name.title(), "id": uuids[node.id], "widgetType": "%s-widget" % node.node_type, "properties":{}, "offset":0, "isLoading":False, "klass":"card card-widget span%s" % size, "columns":[]}]})
- else:
- if node[0].node_type == 'fork':
- wf_rows.append({"widgets":[{"size":size, "name": 'Fork', "id": uuids[node[0].id], "widgetType": "%s-widget" % node[0].node_type, "properties":{}, "offset":0, "isLoading":False, "klass":"card card-widget span%s" % size, "columns":[]}]})
- wf_rows.append({
- "id": str(uuid.uuid4()),
- "widgets":[
- ],
- "columns":[
- {
- "id": str(uuid.uuid4()),
- "size": (size / len(node[1])),
- "rows":
- [{
- "id": str(uuid.uuid4()),
- "widgets": c['widgets'],
- "columns":[]
- }
- for c in col] if type(col) == list else [{
- "id": str(uuid.uuid4()),
- "widgets": col['widgets'],
- "columns":[]
- }
- ]
- ,
- "klass":"card card-home card-column span%s" % (size / len(node[1]))
- }
- for col in _create_layout(node[1], size)
- ]
- })
- wf_rows.append({"widgets":[{"size":size, "name": 'Join', "id": uuids[node[2].id], "widgetType": "%s-widget" % node[2].node_type, "properties":{}, "offset":0, "isLoading":False, "klass":"card card-widget span%s" % size, "columns":[]}]})
- else:
- wf_rows.append(_create_layout(node, size))
- return wf_rows
- wf_rows = _create_layout(old_nodes)
- if wf_rows:
- data['layout'][0]['rows'] = [data['layout'][0]['rows'][0]] + wf_rows + [data['layout'][0]['rows'][-1]]
- # Content
- def _dig_nodes(nodes):
- for node in nodes:
- if type(node) != list:
- properties = {}
- if '%s-widget' % node.node_type in NODES:
- properties = dict(NODES['%s-widget' % node.node_type].get_fields())
- if node.node_type == 'pig':
- properties['script_path'] = node.script_path
- properties['parameters'] = [param for param in json.loads(node.params) if param['value'] != '-param']
- properties['files'] = [{'value': f} for f in json.loads(node.files)]
- properties['archives'] = json.loads(node.archives)
- properties['job_properties'] = json.loads(node.job_properties)
- properties['prepares'] = json.loads(node.prepares)
- properties['job_xml'] = node.job_xml
- properties['description'] = node.description
- properties['sla'] = node.sla
- properties['sla_enabled'] = node.sla_enabled
- elif node.node_type == 'hive':
- properties['script_path'] = node.script_path
- properties['parameters'] = [param for param in json.loads(node.params) if param['value'] != '-param']
- properties['files'] = [{'value': f} for f in json.loads(node.files)]
- properties['archives'] = json.loads(node.archives)
- properties['job_properties'] = json.loads(node.job_properties)
- properties['prepares'] = json.loads(node.prepares)
- properties['hive_xml'] = node.job_xml
- properties['description'] = node.description
- properties['sla'] = node.sla
- properties['sla_enabled'] = node.sla_enabled
- elif node.node_type == 'java':
- properties['jar_path'] = node.jar_path
- properties['main_class'] = node.main_class
- properties['arguments'] = [{'value': arg} for arg in node.args.split(' ')]
- properties['java_opts'] = node.java_opts
- properties['capture_output'] = node.capture_output
- properties['files'] = [{'value': f} for f in json.loads(node.files)]
- properties['archives'] = json.loads(node.archives)
- properties['job_properties'] = json.loads(node.job_properties)
- properties['prepares'] = json.loads(node.prepares)
- properties['job_xml'] = node.job_xml
- properties['description'] = node.description
- properties['sla'] = node.sla
- properties['sla_enabled'] = node.sla_enabled
- elif node.node_type == 'sqoop':
- properties['command'] = node.script_path
- properties['parameters'] = json.loads(node.params)
- properties['files'] = [{'value': f} for f in json.loads(node.files)]
- properties['archives'] = json.loads(node.archives)
- properties['job_properties'] = json.loads(node.job_properties)
- properties['prepares'] = json.loads(node.prepares)
- properties['job_xml'] = node.job_xml
- properties['description'] = node.description
- properties['sla'] = node.sla
- properties['sla_enabled'] = node.sla_enabled
- elif node.node_type == 'mapreduce':
- properties['jar_path'] = node.jar_path
- properties['files'] = [{'value': f} for f in json.loads(node.files)]
- properties['archives'] = json.loads(node.archives)
- properties['job_properties'] = json.loads(node.job_properties)
- properties['prepares'] = json.loads(node.prepares)
- properties['job_xml'] = node.job_xml
- properties['description'] = node.description
- properties['sla'] = node.sla
- properties['sla_enabled'] = node.sla_enabled
- elif node.node_type == 'shell':
- properties['shell_command'] = node.command
- properties['arguments'] = json.loads(node.params)
- properties['capture_output'] = node.capture_output
- properties['files'] = [{'value': f} for f in json.loads(node.files)]
- properties['archives'] = json.loads(node.archives)
- properties['job_properties'] = json.loads(node.job_properties)
- properties['prepares'] = json.loads(node.prepares)
- properties['job_xml'] = node.job_xml
- properties['description'] = node.description
- properties['sla'] = node.sla
- properties['sla_enabled'] = node.sla_enabled
- elif node.node_type == 'ssh':
- properties['host'] = '%s@%s' % (node.user, node.host)  # 'host' matches SshAction.FIELDS
- properties['ssh_command'] = node.command
- properties['arguments'] = json.loads(node.params)  # 'arguments' matches SshAction.FIELDS, as in the shell import above
- properties['capture_output'] = node.capture_output
- properties['description'] = node.description
- properties['sla'] = node.sla
- properties['sla_enabled'] = node.sla_enabled
- elif node.node_type == 'fs':
- properties['deletes'] = [{'value': f['name']} for f in json.loads(node.deletes)]
- properties['mkdirs'] = [{'value': f['name']} for f in json.loads(node.mkdirs)]
- properties['moves'] = json.loads(node.moves)
- chmods = json.loads(node.chmods)
- for c in chmods:
- c['value'] = c['path']
- c['dir_files'] = False
- properties['chmods'] = chmods
- properties['touchzs'] = [{'value': f['name']} for f in json.loads(node.touchzs)]
- properties['description'] = node.description
- properties['sla'] = node.sla
- properties['sla_enabled'] = node.sla_enabled
- elif node.node_type == 'email':
- properties['to'] = node.to
- properties['cc'] = node.cc
- properties['subject'] = node.subject
- properties['body'] = node.body
- properties['description'] = node.description
- properties['sla'] = node.sla
- properties['sla_enabled'] = node.sla_enabled
- elif node.node_type == 'streaming':
- properties['mapper'] = node.mapper
- properties['reducer'] = node.reducer
- properties['files'] = [{'value': f} for f in json.loads(node.files)]
- properties['archives'] = json.loads(node.archives)
- properties['job_properties'] = json.loads(node.job_properties)
- properties['prepares'] = json.loads(node.prepares)
- properties['job_xml'] = node.job_xml
- properties['description'] = node.description
- properties['sla'] = node.sla
- properties['sla_enabled'] = node.sla_enabled
- elif node.node_type == 'distcp':
- properties['distcp_parameters'] = json.loads(node.params)
- properties['java_opts'] = node.job_xml
- properties['job_properties'] = json.loads(node.job_properties)
- properties['prepares'] = json.loads(node.prepares)
- properties['description'] = node.description
- properties['sla'] = node.sla
- properties['sla_enabled'] = node.sla_enabled
- wf_nodes.append({
- "id": uuids[node.id],
- "name": '%s-%s' % (node.node_type.split('-')[0], uuids[node.id][:4]),
- "type": "%s-widget" % node.node_type,
- "properties": properties,
- "children":[{('to' if link.name in ('ok', 'start') else link.name): uuids[link.child.get_full_node().id]} for link in node.get_children_links()]
- })
- else:
- _dig_nodes(node)
- _dig_nodes(old_nodes)
- data['workflow']['nodes'] = wf_nodes
- return Workflow(data=json.dumps(data))
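- # Minimal usage sketch (hypothetical migration caller; the old-model import path
- # is an assumption, not shown in this module):
- #   from oozie.models import Workflow as OldWorkflow
- #   new_wf = import_workflow_from_hue_3_7(OldWorkflow.objects.get(id=123))
- #   json_data = new_wf.to_json()  # ready to persist through a Document2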
- class Coordinator(Job):
- XML_FILE_NAME = 'coordinator.xml'
- PROPERTY_APP_PATH = 'oozie.coord.application.path'
- HUE_ID = 'hue-id-c'
- def __init__(self, data=None, json_data=None, document=None):
- self.document = document
- if document is not None:
- self._data = json.loads(document.data)
- elif json_data is not None:
- self._data = json.loads(json_data)
- elif data is not None:
- self._data = data
- else:
- self._data = {
- 'id': None,
- 'uuid': None,
- 'name': 'My Coordinator',
- 'variables': [], # Aka workflow parameters
- 'properties': {
- 'description': '',
- 'deployment_dir': '',
- 'schema_version': 'uri:oozie:coordinator:0.2',
- 'frequency_number': 1,
- 'frequency_unit': 'days',
- 'cron_frequency': '0 0 * * *',
- 'cron_advanced': False,
- 'timezone': '',
- 'start': '${start_date}',
- 'end': '${end_date}',
- 'workflow': None,
- 'timeout': None,
- 'concurrency': None,
- 'execution': None,
- 'throttle': None,
- 'job_xml': '',
- 'credentials': [],
- 'parameters': [
- {'name': 'oozie.use.system.libpath', 'value': True},
- {'name': 'start_date', 'value': datetime.today().strftime('%Y-%m-%dT%H:%M')},
- {'name': 'end_date', 'value': (datetime.today() + timedelta(days=7)).strftime('%Y-%m-%dT%H:%M')}
- ],
- 'sla': WorkflowConfiguration.SLA_DEFAULT
- }
- }
- @property
- def id(self):
- return self.document.id
- @property
- def uuid(self):
- return self.document.uuid
- def get_data_for_json(self):
- _data = self.data.copy()
- start_date = filter(lambda a: a['name'] == 'start_date', self._data['properties']['parameters'])
- if start_date and type(start_date[0]['value']) == datetime:
- start_date[0]['value'] = start_date[0]['value'].strftime('%Y-%m-%dT%H:%M:%S')
- end_date = filter(lambda a: a['name'] == 'end_date', self._data['properties']['parameters'])
- if end_date and type(end_date[0]['value']) == datetime:
- end_date[0]['value'] = end_date[0]['value'].strftime('%Y-%m-%dT%H:%M:%S')
- return _data
- def to_json(self):
- return json.dumps(self.get_data_for_json())
- def to_json_for_html(self):
- return json.dumps(self.get_data_for_json(), cls=JSONEncoderForHTML)
- @property
- def data(self):
- if type(self._data['properties']['start']) != datetime and '$' not in self._data['properties']['start']:
- self._data['properties']['start'] = parse(self._data['properties']['start'])
- if type(self._data['properties']['end']) != datetime and '$' not in self._data['properties']['end']:
- self._data['properties']['end'] = parse(self._data['properties']['end'])
- if self.document is not None:
- self._data['id'] = self.document.id
- return self._data
- @property
- def name(self):
- return self.data['name']
- def set_workspace(self, user):
- self.data['properties']['deployment_dir'] = Job.get_workspace(user)
- @property
- def deployment_dir(self):
- return self.data['properties']['deployment_dir']
- def find_parameters(self):
- params = set()
- for param in find_dollar_braced_variables(self.name):
- params.add(param)
- for param in find_json_parameters([self.data['properties']]):
- params.add(param)
- for param in find_json_parameters(self.data['variables']):
- if param not in ('MINUTE', 'HOUR', 'DAY', 'MONTH', 'YEAR') and not param.startswith('coord:'):
- params.add(param)
- if self.sla_enabled:
- for param in find_json_parameters(self.sla):
- params.add(param)
- # Pick up any workflow parameters that are still missing
- for prop in self.workflow.find_parameters():
- if prop not in params:
- params.add(prop)
- # Remove the ones filled up by coordinator
- removable_names = [ds['workflow_variable'] for ds in self.data['variables']]
- return dict([(param, '') for param in list(params) if param not in removable_names])
- @property
- def sla_enabled(self):
- return self.data['properties']['sla'][0].get('value')
- @property
- def sla(self):
- return self.data['properties']['sla']
- @property
- def parameters(self):
- return self.data['properties']['parameters']
- @property
- def datasets(self):
- return self.inputDatasets + self.outputDatasets
- @property
- def inputDatasets(self):
- return [Dataset(dataset, self) for dataset in self.data['variables'] if dataset['dataset_type'] == 'input_path']
- @property
- def outputDatasets(self):
- return [Dataset(dataset, self) for dataset in self.data['variables'] if dataset['dataset_type'] == 'output_path']
- @property
- def start_server_tz(self):
- return self.data['properties']['start']
- @property
- def end_server_tz(self):
- return self.data['properties']['end']
- @property
- def frequency(self):
- return '${coord:%(unit)s(%(number)d)}' % {'unit': self.data['properties']['frequency_unit'], 'number': self.data['properties']['frequency_number']}
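- # e.g. frequency_unit='days' with frequency_number=1 renders the Oozie EL
- # expression '${coord:days(1)}'.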
- @property
- def cron_frequency(self):
- data_dict = self.data['properties']
- if 'cron_frequency' in data_dict:
- return data_dict['cron_frequency']
- else:
- # Backward compatibility
- freq = '0 0 * * *'
- if data_dict['frequency_number'] == 1:
- if data_dict['frequency_unit'] == 'minutes':
- freq = '* * * * *'
- elif data_dict['frequency_unit'] == 'hours':
- freq = '0 * * * *'
- elif data_dict['frequency_unit'] == 'days':
- freq = '0 0 * * *'
- elif data_dict['frequency_unit'] == 'months':
- freq = '0 0 1 * *'  # first day of each month; day-of-month 0 is not valid in cron
- return {'frequency': freq, 'isAdvancedCron': False}
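- # Legacy example: a coordinator saved before cron support with
- # frequency_number=1 and frequency_unit='hours' surfaces here as
- # {'frequency': '0 * * * *', 'isAdvancedCron': False}; documents created with
- # the defaults above store and return the plain cron string instead.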
- def to_xml(self, mapping=None):
- if mapping is None:
- mapping = {}
- tmpl = "editor2/gen/coordinator.xml.mako"
- return re.sub(re.compile('\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {'coord': self, 'mapping': mapping})).encode('utf-8', 'xmlcharrefreplace')
- def clear_workflow_params(self):
- # Repopulated in the config properties
- self.data['variables'] = [dataset for dataset in self.data['variables'] if dataset['dataset_type'] != 'parameter']
- @property
- def properties(self):
- props = [{'name': dataset['workflow_variable'], 'value': dataset['dataset_variable']} for dataset in self.data['variables'] if dataset['dataset_type'] == 'parameter']
- props += self.data['properties']['parameters']
- return props
- @property
- def workflow(self):
- if self.document is None:
- raise PopupException(_('Cannot return workflow since document attribute is None.'))
- wf_doc = Document2.objects.get_by_uuid(user=self.document.owner, uuid=self.data['properties']['workflow'])
- return Workflow(document=wf_doc)
- def get_absolute_url(self):
- return reverse('oozie:edit_coordinator') + '?coordinator=%s' % self.id
- @classmethod
- def get_application_path_key(cls):
- return 'oozie.coord.application.path'
- class Dataset(object):
- def __init__(self, data, coordinator):
- self._data = data
- self.coordinator = coordinator
- @property
- def data(self):
- self._data['name'] = self._data['workflow_variable']
- return self._data
- @property
- def frequency(self):
- if self.data['same_frequency']:
- if self.coordinator.cron_frequency == '* * * * *':
- frequency_unit = 'minutes'
- elif self.coordinator.cron_frequency == '0 * * * *':
- frequency_unit = 'hours'
- elif self.coordinator.cron_frequency == '0 0 * * *':
- frequency_unit = 'days'
- elif self.coordinator.cron_frequency == '0 0 1 * *':
- frequency_unit = 'months'
- else:
- raise PopupException(_('The frequency of the workflow parameter "%s" cannot be guessed from the frequency of the coordinator.'
- ' It needs to be specified manually.') % self.data['name'])
- frequency_number = 1
- else:
- frequency_unit = self.data['frequency_unit']
- frequency_number = self.data['frequency_number']
- return '${coord:%(unit)s(%(number)s)}' % {'unit': frequency_unit, 'number': frequency_number}
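- # e.g. a dataset with same_frequency=True on a daily ('0 0 * * *') coordinator
- # resolves to '${coord:days(1)}'.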
- @property
- def start_server_tz(self):
- if self.data['same_start']:
- return self.coordinator.start_server_tz
- else:
- return convert_to_server_timezone(self.data['start'], self.data['timezone'])
- @property
- def timezone(self):
- if self.data['same_timezone']:
- return self.coordinator.data['properties']['timezone']
- else:
- return self.data['timezone']
- @property
- def start_instance(self):
- if not self.is_advanced_start_instance:
- return int(self.data['advanced_start_instance'])
- else:
- return 0
- @property
- def is_advanced_start_instance(self):
- return not self.is_int(self.data['advanced_start_instance'])
- def is_int(self, text):
- try:
- int(text)
- return True
- except ValueError:
- return False
- @property
- def end_instance(self):
- if not self.is_advanced_end_instance:
- return int(self.data['advanced_end_instance'])
- else:
- return 0
- @property
- def is_advanced_end_instance(self):
- return not self.is_int(self.data['advanced_end_instance'])
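- # Illustrative values: '-1' parses as an int, so is_advanced_*_instance is False
- # and the numeric offset -1 is used; an EL value such as '${coord:latest(0)}' is
- # treated as advanced and is presumably emitted verbatim by the XML template,
- # the 0 returned here being a placeholder.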
- class Bundle(Job):
- XML_FILE_NAME = 'bundle.xml'
- PROPERTY_APP_PATH = 'oozie.bundle.application.path'
- HUE_ID = 'hue-id-b'
- def __init__(self, data=None, json_data=None, document=None):
- self.document = document
- if document is not None:
- self._data = json.loads(document.data)
- elif json_data is not None:
- self._data = json.loads(json_data)
- elif data is not None:
- self._data = data
- else:
- self._data = {
- 'id': None,
- 'uuid': None,
- 'name': 'My Bundle',
- 'coordinators': [],
- 'properties': {
- 'description': '',
- 'deployment_dir': '',
- 'schema_version': 'uri:oozie:bundle:0.2',
- 'kickoff': datetime.today(),
- 'parameters': [{'name': 'oozie.use.system.libpath', 'value': 'true'}]
- }
- }
- @property
- def id(self):
- return self.document.id
- @property
- def uuid(self):
- return self.document.uuid
- def get_data_for_json(self):
- _data = self.data.copy()
- _data['properties']['kickoff'] = _data['properties']['kickoff'].strftime('%Y-%m-%dT%H:%M:%S')
- return _data
- def to_json(self):
- return json.dumps(self.get_data_for_json())
- def to_json_for_html(self):
- return json.dumps(self.get_data_for_json(), cls=JSONEncoderForHTML)
- @property
- def data(self):
- if type(self._data['properties']['kickoff']) == unicode:
- self._data['properties']['kickoff'] = parse(self._data['properties']['kickoff'])
- if self.document is not None:
- self._data['id'] = self.document.id
- return self._data
- def to_xml(self, mapping=None):
- if mapping is None:
- mapping = {}
- mapping.update(dict(list(self.get_coordinator_docs().values_list('uuid', 'name'))))
- tmpl = "editor2/gen/bundle.xml.mako"
- return force_unicode(
- re.sub(re.compile('\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {
- 'bundle': self,
- 'mapping': mapping
- })))
- def get_coordinator_docs(self):
- coordinator_ids = [coordinator['coordinator'] for coordinator in self.data['coordinators']]
- return Document2.objects.filter(type='oozie-coordinator2', uuid__in=coordinator_ids)
- def get_coordinator_objects(self):
- return [Coordinator(document=doc) for doc in self.get_coordinator_docs()]
- @property
- def name(self):
- return self.data['name']
- @property
- def parameters(self):
- return self.data['properties']['parameters']
- @property
- def kick_off_time_utc(self):
- return utc_datetime_format(self.data['properties']['kickoff'])
- def set_workspace(self, user):
- self.data['properties']['deployment_dir'] = Job.get_workspace(user)
- @property
- def deployment_dir(self):
- return self.data['properties']['deployment_dir']
- def find_parameters(self):
- params = set()
- for param in find_dollar_braced_variables(self.name):
- params.add(param)
- for coord in self.get_coordinator_objects():
- params.update(coord.find_parameters())
- for param in find_json_parameters([self.data['properties']]):
- params.add(param)
- # Remove the ones filled up by bundle
- removable_names = [p['name'] for coord in self.data['coordinators'] for p in coord['properties']]
- return dict([(param, '') for param in list(params) if param not in removable_names])
- def get_absolute_url(self):
- return reverse('oozie:edit_bundle') + '?bundle=%s' % self.id
- @classmethod
- def get_application_path_key(cls):
- return 'oozie.bundle.application.path'
- class History(object):
- @classmethod
- def get_workflow_from_config(cls, conf_dict):
- try:
- doc = Document2.objects.get(type='oozie-workflow2', id=conf_dict.get(Workflow.HUE_ID))
- return Workflow(document=doc)
- except Document2.DoesNotExist:
- pass
- @classmethod
- def get_coordinator_from_config(cls, conf_dict):
- try:
- doc = Document2.objects.get(type='oozie-coordinator2', id=conf_dict.get(Coordinator.HUE_ID))
- return Coordinator(document=doc)
- except Document2.DoesNotExist:
- pass
- @classmethod
- def get_bundle_from_config(cls, conf_dict):
- try:
- doc = Document2.objects.get(type='oozie-bundle2', id=conf_dict.get(Bundle.HUE_ID))
- return Bundle(document=doc)
- except Document2.DoesNotExist:
- pass
- def _import_workspace(fs, user, job):
- source_workspace_dir = job.deployment_dir
- job.set_workspace(user)
- job.check_workspace(fs, user)
- job.import_workspace(fs, source_workspace_dir, user)
- def _save_workflow(workflow, layout, user, fs=None):
- if workflow.get('id'):
- workflow_doc = Document2.objects.get(id=workflow['id'])
- else:
- workflow_doc = Document2.objects.create(name=workflow['name'], uuid=workflow['uuid'], type='oozie-workflow2', owner=user, description=workflow['properties']['description'])
- Document.objects.link(workflow_doc, owner=workflow_doc.owner, name=workflow_doc.name, description=workflow_doc.description, extra='workflow2')
- # Exclude the non-history sub-workflow and Hive/Java query dependencies; what remains are the history and coordinator dependencies.
- workflow_doc.dependencies = workflow_doc.dependencies.exclude(Q(is_history=False) & Q(type__in=['oozie-workflow2', 'query-hive', 'query-java']))
- dependencies = \
- [node['properties']['workflow'] for node in workflow['nodes'] if node['type'] == 'subworkflow-widget'] + \
- [node['properties']['uuid'] for node in workflow['nodes'] if 'document-widget' in node['type']]
- if dependencies:
- dependency_docs = Document2.objects.filter(uuid__in=dependencies)
- workflow_doc.dependencies.add(*dependency_docs)
- if workflow['properties'].get('imported'): # We convert from an old workflow format (pre-3.8) to the latest
- workflow['properties']['imported'] = False
- workflow_instance = Workflow(workflow=workflow, user=user)
- _import_workspace(fs, user, workflow_instance)
- workflow['properties']['deployment_dir'] = workflow_instance.deployment_dir
- workflow_doc.update_data({'workflow': workflow})
- workflow_doc.update_data({'layout': layout})
- workflow_doc1 = workflow_doc.doc.get()
- workflow_doc.name = workflow_doc1.name = workflow['name']
- workflow_doc.description = workflow_doc1.description = workflow['properties']['description']
- workflow_doc.save()
- workflow_doc1.save()
- return workflow_doc
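- # Hypothetical call site (e.g. a save endpoint), assuming 'workflow' and
- # 'layout' arrive as JSON strings from the editor page:
- #   workflow = json.loads(request.POST.get('workflow'))
- #   layout = json.loads(request.POST.get('layout'))
- #   doc = _save_workflow(workflow, layout, request.user, fs=request.fs)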
- class WorkflowBuilder(object):
- """
- Builds a workflow whose nodes are saved Documents (e.g. a saved Hive query, a saved Pig script...).
- """
- def create_workflow(self, user, document=None, documents=None, name=None, managed=False):
- nodes = []
- if documents is None:
- documents = [document]
- if name is None:
- name = _('Schedule of ') + ', '.join([document.name or document.type for document in documents])
- for document in documents:
- if document.type == 'query-java':
- node = self.get_java_document_node(document, name)
- else:
- node = self.get_hive_document_node(document, name, user)
- nodes.append(node)
- workflow_doc = self.get_workflow(nodes, name, document.uuid, user, managed=managed)
- workflow_doc.dependencies.add(*documents)
- return workflow_doc
- def get_hive_document_node(self, document, name, user):
- api = get_oozie(user)
- credentials = [HiveDocumentAction.DEFAULT_CREDENTIALS] if api.security_enabled else []
- notebook = Notebook(document=document)
- parameters = find_dollar_braced_variables(notebook.get_str())
- parameters = [{u'value': u'%s=${%s}' % (p, p)} for p in parameters]
- return {
- u'name': u'doc-hive-%s' % document.uuid[:4],
- u'id': str(uuid.uuid4()),
- u'type': u'hive-document-widget',
- u'properties': {
- u'files': [],
- u'job_xml': u'',
- u'uuid': document.uuid,
- u'parameters': parameters,
- u'retry_interval': [],
- u'retry_max': [],
- u'job_properties': [],
- u'sla': [
- {u'key': u'enabled', u'value': False},
- {u'key': u'nominal-time', u'value': u'${nominal_time}'},
- {u'key': u'should-start', u'value': u''},
- {u'key': u'should-end', u'value': u'${30 * MINUTES}'},
- {u'key': u'max-duration', u'value': u''},
- {u'key': u'alert-events', u'value': u''},
- {u'key': u'alert-contact', u'value': u''},
- {u'key': u'notification-msg', u'value': u''},
- {u'key': u'upstream-apps', u'value': u''},
- ],
- u'archives': [],
- u'prepares': [],
- u'credentials': credentials,
- u'password': u'',
- u'jdbc_url': u'',
- },
- u'children': [
- {u'to': u'33430f0f-ebfa-c3ec-f237-3e77efa03d0a'},
- {u'error': u'17c9c895-5a16-7443-bb81-f34b30b21548'},
- ],
- u'actionParameters': [],
- }
- def get_java_document_node(self, document, name):
- credentials = []
- return {
- "id": str(uuid.uuid4()),
- 'name': u'doc-java-%s' % document.uuid[:4],
- "type":"java-document-widget",
- "properties":{
- u'uuid': document.uuid, # Files, main_class and arguments come from the saved document
- "job_xml":[],
- "jar_path": "",
- "java_opts":[],
- "retry_max":[],
- "retry_interval":[],
- "job_properties":[],
- "capture_output": False,
- "prepares":[],
- "credentials": credentials,
- "sla":[{"value":False, "key":"enabled"}, {"value":"${nominal_time}", "key":"nominal-time"}, {"value":"", "key":"should-start"}, {"value":"${30 * MINUTES}", "key":"should-end"}, {"value":"", "key":"max-duration"}, {"value":"", "key":"alert-events"}, {"value":"", "key":"alert-contact"}, {"value":"", "key":"notification-msg"}, {"value":"", "key":"upstream-apps"}],
- "archives":[]
- },
- "children":[
- {"to":"33430f0f-ebfa-c3ec-f237-3e77efa03d0a"},
- {"error":"17c9c895-5a16-7443-bb81-f34b30b21548"}
- ],
- "actionParameters":[],
- "actionParametersFetched": False
- }
- def get_workflow(self, nodes, name, doc_uuid, user, managed=False):
- parameters = []
- data = {
- 'workflow': {
- u'name': name,
- u'nodes': [{
- u'name': u'Start',
- u'properties': {},
- u'actionParametersFetched': False,
- u'id': u'3f107997-04cc-8733-60a9-a4bb62cebffc',
- u'type': u'start-widget',
- u'children': [{u'to': u'33430f0f-ebfa-c3ec-f237-3e77efa03d0a'}],
- u'actionParameters': [],
- }, {
- u'name': u'End',
- u'properties': {},
- u'actionParametersFetched': False,
- u'id': u'33430f0f-ebfa-c3ec-f237-3e77efa03d0a',
- u'type': u'end-widget',
- u'children': [],
- u'actionParameters': [],
- }, {
- u'name': u'Kill',
- u'properties': {
- u'body': u'',
- u'cc': u'',
- u'to': u'',
- u'enableMail': False,
- u'message': u'Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]',
- u'subject': u'',
- },
- u'actionParametersFetched': False,
- u'id': u'17c9c895-5a16-7443-bb81-f34b30b21548',
- u'type': u'kill-widget',
- u'children': [],
- u'actionParameters': [],
- }
- ],
- u'properties': {
- u'job_xml': u'',
- u'description': u'',
- u'wf1_id': None,
- u'sla_enabled': False,
- u'deployment_dir': Job.get_workspace(user),
- u'schema_version': u'uri:oozie:workflow:0.5',
- u'sla': [
- {u'key': u'enabled', u'value': False},
- {u'key': u'nominal-time', u'value': u'${nominal_time}'},
- {u'key': u'should-start', u'value': u''},
- {u'key': u'should-end', u'value': u'${30 * MINUTES}'},
- {u'key': u'max-duration', u'value': u''},
- {u'key': u'alert-events', u'value': u''},
- {u'key': u'alert-contact', u'value': u''},
- {u'key': u'notification-msg', u'value': u''},
- {u'key': u'upstream-apps', u'value': u''},
- ],
- u'show_arrows': True,
- u'parameters': parameters,
- u'properties': [],
- },
- u'uuid': str(uuid.uuid4()),
- }
- }
- _prev_node = data['workflow']['nodes'][0]
- for node in nodes:
- data['workflow']['nodes'].append(node)
- _prev_node['children'][0]['to'] = node['id'] # We link nodes
- _prev_node = node
- workflow_doc = _save_workflow(data['workflow'], {}, user)
- workflow_doc.is_managed = managed
- workflow_doc.save()
- return workflow_doc
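- # Hypothetical usage: schedule a saved Hive query as a single-action workflow.
- #   doc = Document2.objects.get_by_uuid(user=user, uuid=saved_query_uuid)
- #   workflow_doc = WorkflowBuilder().create_workflow(user=user, document=doc, managed=True)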