
HUE-8509 [oozie] Remote job action

Romain Rigaux 7 years ago
Parent commit: 926816a

+ 18 - 16
apps/oozie/src/oozie/models2.py

@@ -784,7 +784,6 @@ class Node():
     if self.data['type'] in ('hive2', 'hive-document') and not self.data['properties']['jdbc_url']:
       self.data['properties']['jdbc_url'] = _get_hiveserver2_url()
 
-
     if self.data['type'] == 'fork':
       links = [link for link in self.data['children'] if link['to'] in node_mapping]
       if len(links) != len(self.data['children']):
@@ -792,7 +791,17 @@ class Node():
                  % (len(links), len(self.data['children']), links, self.data['children']))
         self.data['children'] = links
 
-    if self.data['type'] == JavaDocumentAction.TYPE:
+    if self.data['type'] == AltusAction.TYPE or ('altus' in mapping.get('cluster', '') and (self.data['type'] == SparkDocumentAction.TYPE or self.data['type'] == 'spark-document')):
+      shell_command_name = self.data['name'] + '.sh'
+      self.data['properties']['shell_command'] = shell_command_name
+      self.data['properties']['env_var'] = []
+      self.data['properties']['arguments'] = []
+      self.data['properties']['job_properties'] = []
+      self.data['properties']['capture_output'] = True
+      self.data['properties']['files'] = [{'value': shell_command_name}, {'value': 'altus.py'}]
+      self.data['properties']['archives'] = []
+
+    elif self.data['type'] == JavaDocumentAction.TYPE:
       notebook = Notebook(document=Document2.objects.get_by_uuid(user=self.user, uuid=self.data['properties']['uuid']))
       properties = notebook.get_data()['snippets'][0]['properties']
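The condition on the new branch above is dense; factored out, it reads: rewrite the action into a shell command either when it is an explicit Altus action, or when a Spark document action is being submitted against an Altus cluster. A minimal sketch, assuming AltusAction.TYPE and SparkDocumentAction.TYPE equal the string literals 'altus' and 'spark-document' used elsewhere in this diff:

    # Sketch only; the TYPE constants are assumed, not verified.
    def runs_as_remote_shell(node_type, mapping):
        on_altus_cluster = 'altus' in mapping.get('cluster', '')
        return node_type == 'altus' or (on_altus_cluster and node_type == 'spark-document')

    print(runs_as_remote_shell('spark-document', {'cluster': 'altus-dataeng-1'}))  # True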
 
@@ -912,18 +921,6 @@ class Node():
 
       self.data['properties']['files'] = files
       self.data['properties']['archives'] = []
-    elif self.data['type'] == AltusAction.TYPE:
-      shell_command_name = self.data['name'] + '.sh'
-      self.data['properties']['shell_command'] = shell_command_name
-      self.data['properties']['env_var'] = []
-      self.data['properties']['arguments'] = []
-      self.data['properties']['job_properties'] = []
-      self.data['properties']['capture_output'] = True
-
-      files = [{'value': shell_command_name}, {'value': 'altus.py'}]
-
-      self.data['properties']['files'] = files
-      self.data['properties']['archives'] = []
 
 
     data = {
@@ -949,7 +946,7 @@ class Node():
         if self.data['type'] == EndNode.TYPE:
           self.data['properties']['body'] = 'View result file at %(send_result_browse_url)s' % mapping
 
-    return django_mako.render_to_string(self.get_template_name(), data)
+    return django_mako.render_to_string(self.get_template_name(mapping), data)
 
   @property
   def id(self):
@@ -992,7 +989,10 @@ class Node():
     # Backward compatibility
     _upgrade_older_node(self.data)
 
-  def get_template_name(self):
+  def get_template_name(self, mapping=None):
+    if mapping is None:
+      mapping = {}
+
     node_type = self.data['type']
     if self.data['type'] == JavaDocumentAction.TYPE:
       node_type = JavaAction.TYPE
@@ -1000,6 +1000,8 @@ class Node():
       node_type = ShellAction.TYPE
     elif self.data['type'] == AltusAction.TYPE:
       node_type = ShellAction.TYPE
+    elif mapping.get('cluster') and 'document' in node_type:
+      node_type = ShellAction.TYPE
 
     return 'editor2/gen/workflow-%s.xml.mako' % node_type
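In effect, get_template_name() is now submission-aware: besides the existing document-to-native rewrites, any document action whose mapping carries a cluster falls back to the generic shell template, which is what lets a spark-document node render as the shell action that runs the generated altus.py. A minimal sketch of the dispatch, with the TYPE constants inlined as assumed string literals (the exact elif chain is partially cut off by the hunk above):

    def get_template_name(node_type, mapping=None):
        if mapping is None:
            mapping = {}
        if node_type == 'java-document':
            node_type = 'java'
        elif node_type == 'altus':
            node_type = 'shell'
        elif mapping.get('cluster') and 'document' in node_type:
            node_type = 'shell'  # new: remote submissions use the shell template
        return 'editor2/gen/workflow-%s.xml.mako' % node_type

    print(get_template_name('spark-document', {'cluster': 'altus-dataeng-1'}))
    # editor2/gen/workflow-shell.xml.mako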
 

+ 1 - 4
apps/oozie/src/oozie/views/editor2.py

@@ -406,10 +406,7 @@ def _submit_workflow_helper(request, workflow, submit_action):
         mapping['submit_single_action'] = True
 
       if 'altus' in cluster.get('type', ''):
-        notebook = {}
-        snippet = {'statement': 'SELECT 1', 'type': 'hive'}
-        handle = DataEngApi(user=request.user, request=request, cluster_name=cluster.get('name')).execute(notebook, snippet)
-        return JsonResponse({'status': 0, 'job_id': handle.get('id'), 'type': 'Altus HIVE'}, safe=False)
+        mapping['cluster'] = cluster.get('id')
 
       try:
         job_id = _submit_workflow(request.user, request.fs, request.jt, workflow, mapping)
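Instead of short-circuiting the request with a hardcoded SELECT 1 through DataEngApi, the cluster id now rides along in the regular submission mapping and the workflow continues through _submit_workflow(); Submission later reads the same key back as self.properties.get('cluster', ''). An illustrative flow with a hypothetical cluster id:

    # Hypothetical values; the only contract is that the Altus cluster
    # id ends up under mapping['cluster'] and contains the substring 'altus'.
    cluster = {'type': 'altus-dataeng', 'id': 'altus-dataeng-customer360'}
    mapping = {}
    if 'altus' in cluster.get('type', ''):
        mapping['cluster'] = cluster.get('id')

    # Later, in Submission.deploy():
    is_altus_job = 'altus' in mapping.get('cluster', '')
    print(is_altus_job)  # True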

+ 94 - 8
desktop/libs/liboozie/src/liboozie/submission2.py

@@ -210,7 +210,9 @@ class Submission(object):
           self.job.override_subworkflow_id(action, workflow.id) # For displaying the correct graph
           self.properties['workspace_%s' % workflow.uuid] = workspace # For pointing to the correct workspace
 
-        elif action.data['type'] == 'altus':
+        elif action.data['type'] == 'altus' or (action.data['type'] == 'spark-document' and 'altus' in self.properties.get('cluster', '')):
+          is_altus_job = 'altus' in self.properties.get('cluster', '')
+
           self._create_file(deployment_dir, action.data['name'] + '.sh', '''#!/usr/bin/env bash
 
 export PYTHONPATH=`pwd`
@@ -221,13 +223,31 @@ python altus.py
 
           ''')
 
-          shell_script = self._generate_altus_action_script(
-            service=action.data['properties'].get('service'),
-            command=action.data['properties'].get('command'),
-            arguments=dict([arg.split('=', 1) for arg in action.data['properties'].get('arguments', [])]),
-            auth_key_id=ALTUS.AUTH_KEY_ID.get(),
-            auth_key_secret=ALTUS.AUTH_KEY_SECRET.get().replace('\\n', '\n')
-          )
+          if is_altus_job:
+            shell_script = self._generate_altus_job_action_script(
+                service='dataeng',
+                cluster=self.properties['cluster'],
+                jobs=[{
+                    'sparkJob': {
+                        'jars': [u's3a://datawarehouse-customer360/ETL/spark-examples.jar'],
+                        'mainClass': u'org.apache.spark.examples.SparkPi',
+                        'applicationArguments': [u'10']
+                      },
+                    'name': None,
+                    'failureAction': 'NONE'
+                }],
+                auth_key_id=ALTUS.AUTH_KEY_ID.get(),
+                auth_key_secret=ALTUS.AUTH_KEY_SECRET.get().replace('\\n', '\n')
+            )
+          else:
+            shell_script = self._generate_altus_action_script(
+                service=action.data['properties'].get('service'),
+                command=action.data['properties'].get('command'),
+                arguments=dict([arg.split('=', 1) for arg in action.data['properties'].get('arguments', [])]),
+                auth_key_id=ALTUS.AUTH_KEY_ID.get(),
+                auth_key_secret=ALTUS.AUTH_KEY_SECRET.get().replace('\\n', '\n')
+            )
+
           self._create_file(deployment_dir, 'altus.py', shell_script)
 
           ext_py_lib_path = os.path.join(get_desktop_root(), 'core', 'ext-py')
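At deploy time the deployment directory thus gains the bash wrapper (the action's .sh file shown above) and the generated altus.py, exactly the pair the node's files property points the shell action at in models2.py; the ext-py path here is where the bundled client libraries come from, so that the wrapper's export PYTHONPATH=`pwd` resolves them. The choice between the two script generators is a simple dispatch; a sketch, with the type strings assumed as in this diff:

    # A Spark document action bound to an Altus cluster submits a full
    # job via submitJobs; a plain Altus action still runs one command.
    def pick_generator(action_type, properties):
        is_altus_job = 'altus' in properties.get('cluster', '')
        if action_type == 'spark-document' and is_altus_job:
            return '_generate_altus_job_action_script'
        return '_generate_altus_action_script'

    print(pick_generator('spark-document', {'cluster': 'altus-dataeng-1'}))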
@@ -600,6 +620,72 @@ print _exec('%(service)s', '%(command)s', %(args)s)
       'auth_key_secret': auth_key_secret
     }
 
+  def _generate_altus_job_action_script(self, service, cluster, jobs, auth_key_id, auth_key_secret):
+    if service == 'analyticdb' or service == 'dataware':
+      hostname = ALTUS.HOSTNAME_ANALYTICDB.get()
+    elif service == 'dataeng':
+      hostname = ALTUS.HOSTNAME_DATAENG.get()
+    elif service == 'wa':
+      hostname = ALTUS.HOSTNAME_WA.get()
+    else:
+      hostname = ALTUS.HOSTNAME.get()
+
+    return """#!/usr/bin/env python
+
+import time
+
+from ast import literal_eval
+
+from navoptapi.api_lib import ApiLib
+
+hostname = '%(hostname)s'
+cluster = '%(cluster)s'
+auth_key_id = '%(auth_key_id)s'
+auth_key_secret = '''%(auth_key_secret)s'''
+
+def _exec(service, command, parameters=None):
+  if parameters is None:
+    parameters = {}
+
+  try:
+    api = ApiLib(service, hostname, auth_key_id, auth_key_secret)
+    resp = api.call_api(command, parameters)
+    return resp.json()
+  except Exception, e:
+    print e
+    raise e
+
+
+try:
+  handle = _exec('%(service)s', 'submitJobs', {'clusterName': cluster, 'jobs': literal_eval("%(jobs)s")})
+
+  job_id = handle['jobs'][0]['jobId']
+  status = 'QUEUED'
+  print 'Job submitted: %%s' %% job_id
+
+  while status in ('QUEUED', 'RUNNING', 'SUBMITTING'):
+    time.sleep(5)
+
+    print 'Checking status...'
+    status = _exec('%(service)s', 'describeJob', {'jobId': job_id})['job']['status']
+
+  if status != 'COMPLETED':
+    raise Exception('Job %%s failed %%s' %% (job_id, status))
+  else:
+    print 'Job %%s completed successfully' %% job_id
+except Exception, e:
+  print e
+  raise e
+
+""" % {
+      'hostname': hostname,
+      'service': service,
+      'cluster': cluster,
+      'jobs': repr(jobs),
+      'auth_key_id': auth_key_id,
+      'auth_key_secret': auth_key_secret
+    }
+
 def create_directories(fs, directory_list=[]):
   # If needed, create the remote home, deployment and data directories
   directories = [REMOTE_DEPLOYMENT_DIR.get()] + directory_list
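
The jobs payload survives the trip into the generated script because it is inlined with repr() and re-parsed with ast.literal_eval(); that round trip is the identity for pure Python literals (dicts, lists, strings, numbers, None). Note the template wraps the payload in double quotes, so a literal containing a double quote would break the generated source. A quick check under that assumption, with a hypothetical payload mirroring the hardcoded sparkJob above:

    from ast import literal_eval

    jobs = [{
        'sparkJob': {
            'jars': ['s3a://datawarehouse-customer360/ETL/spark-examples.jar'],
            'mainClass': 'org.apache.spark.examples.SparkPi',
            'applicationArguments': ['10'],
        },
        'name': None,
        'failureAction': 'NONE',
    }]

    # Mirrors what the generated script does with literal_eval("%(jobs)s").
    assert literal_eval(repr(jobs)) == jobs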