
HUE-3224 [connector] Convert Pig connector to offer a new Pig snippet
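
The dedicated Pig batch connector (notebook/connectors/pig_batch.py) is removed. Pig snippets are now generated and submitted as regular Oozie workflows through the generic OozieApi, the same path already used for Hive and Spark document actions. The 'pig' notebook interface is kept as an alias of 'oozie' for backward compatibility until Hue 4, and the shipped interpreter defaults switch from interface=pig to interface=oozie.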

Romain Rigaux 9 years ago
parent commit d5a511f6d9

+ 1 - 1
apps/oozie/src/oozie/conf.py

@@ -85,7 +85,7 @@ ENABLE_DOCUMENT_ACTION = Config(
   key="enable_document_action",
   help=_t("Flag to enable the saved Editor queries to be dragged and dropped into a workflow."),
   type=bool,
-  default=True
+  default=False
 )
 
 

+ 121 - 2
apps/oozie/src/oozie/models2.py

@@ -773,6 +773,20 @@ class Node():
       self.data['properties']['spark_opts'] = ' '.join(properties['spark_opts'])
       if len(properties['jars']) > 1:
         self.data['properties']['spark_opts'] += ' --py-files ' + ','.join([os.path.basename(f) for f in properties['jars'][1:]])
+    elif self.data['type'] == PigDocumentAction.TYPE:
+      notebook = Notebook(document=Document2.objects.get_by_uuid(user=self.user, uuid=self.data['properties']['uuid']))
+      action = notebook.get_data()['snippets'][0]
+
+      name = '%s-%s' % (self.data['type'].split('-')[0], self.data['id'][:4])
+      self.data['properties']['script_path'] = "${wf:appPath()}/" + name + ".pig"
+      self.data['properties']['parameters'] = [{'value': prop} for prop in action['properties']['parameters']]
+      self.data['properties']['arguments'] = []
+      job_properties = []
+      for prop in action['properties']['hadoopProperties']:
+        prop_name, prop_value = prop.split('=', 1)
+        job_properties.append({'name': prop_name, 'value': prop_value})
+      self.data['properties']['job_properties'] = job_properties
+      self.data['properties']['files'] = [{'value': prop} for prop in action['properties']['resources']]
 
     data = {
       'node': self.data,
@@ -2240,6 +2254,73 @@ class SparkDocumentAction(Action):
     return [cls.FIELDS['uuid']]
 
 
+class PigDocumentAction(Action):
+  TYPE = 'pig-document'
+  FIELDS = {
+    'uuid': {
+        'name': 'uuid',
+        'label': _('Pig script'),
+        'value': '',
+        'help_text': _('Select a saved Pig script you want to schedule.'),
+        'type': 'pig'
+     },
+     # Common
+     'files': {
+          'name': 'files',
+          'label': _('Files'),
+          'value': [],
+          'help_text': _('Files put in the running directory.'),
+          'type': ''
+     },
+     'archives': {
+          'name': 'archives',
+          'label': _('Archives'),
+          'value': [],
+          'help_text': _('zip, tar and tgz/tar.gz uncompressed into the running directory.'),
+          'type': ''
+     },
+     'job_properties': {
+          'name': 'job_properties',
+          'label': _('Hadoop job properties'),
+          'value': [],
+          'help_text': _('value, e.g. production'),
+          'type': ''
+     },
+     'prepares': {
+          'name': 'prepares',
+          'label': _('Prepares'),
+          'value': [],
+          'help_text': _('Path to manipulate before starting the application.'),
+          'type': ''
+     },
+     'job_xml': {
+          'name': 'job_xml',
+          'label': _('Job XML'),
+          'value': [],
+          'help_text': _('Refer to a Hadoop JobConf job.xml'),
+          'type': ''
+     },
+     'retry_max': {
+          'name': 'retry_max',
+          'label': _('Max retry'),
+          'value': [],
+          'help_text': _('Number of times, default is 3'),
+          'type': ''
+     },
+     'retry_interval': {
+          'name': 'retry_interval',
+          'label': _('Retry interval'),
+          'value': [],
+          'help_text': _('Wait time in minutes, default is 10'),
+          'type': ''
+     }
+  }
+
+  @classmethod
+  def get_mandatory_fields(cls):
+    return [cls.FIELDS['uuid']]
+
+
 class DecisionNode(Action):
   TYPE = 'decision'
   FIELDS = {}
@@ -3079,8 +3160,10 @@ class WorkflowBuilder():
         node = self.get_hive_document_node(document, user)
       elif document.type == 'query-spark2':
         node = self.get_spark_document_node(document, user)
+      elif document.type == 'query-pig':
+        node = self.get_pig_document_node(document, user)
       else:
-        raise PopupException(_('Snippet type %(type)s is not supported in batch execution.') % document.type)
+        raise PopupException(_('Snippet type %s is not supported in batch execution.') % document.type)
 
       nodes.append(node)
 
@@ -3102,7 +3185,7 @@ class WorkflowBuilder():
       elif snippet['type'] == 'query-hive':
         node = self.get_hive_snippet_node(snippet, user)
       else:
-        raise PopupException(_('Snippet type %(type)s is not supported in batch execution.') % snippet)
+        raise PopupException(_('Snippet type %s is not supported in batch execution.') % snippet)
 
       nodes.append(node)
 
@@ -3232,6 +3315,42 @@ class WorkflowBuilder():
 
     return node
 
+  def get_pig_document_node(self, document, user):
+    node = self._get_pig_node(document.uuid, is_document_node=True)
+
+    node['properties']['uuid'] = document.uuid
+
+    return node
+
+  def _get_pig_node(self, node_id, credentials=None, is_document_node=False):
+    if credentials is None:
+      credentials = []
+
+    return {
+        "id": node_id,
+        'name': 'pig-%s' % node_id[:4],
+        "type": "pig-document-widget",
+        "properties":{
+              "job_xml": [],
+              "jar_path": "",
+              "java_opts": [],
+              "retry_max": [],
+              "retry_interval": [],
+              "job_properties": [],
+              "capture_output": False,
+              "prepares": [],
+              "credentials": credentials,
+              "sla": [{"value":False, "key":"enabled"}, {"value":"${nominal_time}", "key":"nominal-time"}, {"value":"", "key":"should-start"}, {"value":"${30 * MINUTES}", "key":"should-end"}, {"value":"", "key":"max-duration"}, {"value":"", "key":"alert-events"}, {"value":"", "key":"alert-contact"}, {"value":"", "key":"notification-msg"}, {"value":"", "key":"upstream-apps"}],
+              "archives": []
+        },
+        "children": [
+            {"to": "33430f0f-ebfa-c3ec-f237-3e77efa03d0a"},
+            {"error": "17c9c895-5a16-7443-bb81-f34b30b21548"}
+        ],
+        "actionParameters": [],
+        "actionParametersFetched": False
+    }
+
   def _get_java_node(self, node_id, credentials=None, is_document_node=False):
     if credentials is None:
       credentials = []

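A minimal standalone sketch (not part of the patch) of how the loop added to Node() above turns a snippet's 'key=value' Hadoop property strings into Oozie job_properties entries:

  # Illustrative input; a real snippet carries these under
  # action['properties']['hadoopProperties'].
  hadoop_properties = ['mapred.job.queue.name=production', 'mapreduce.job.reduces=2']

  job_properties = []
  for prop in hadoop_properties:
    prop_name, prop_value = prop.split('=', 1)  # split on the first '=' only
    job_properties.append({'name': prop_name, 'value': prop_value})

  assert job_properties[0] == {'name': 'mapred.job.queue.name', 'value': 'production'}
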
+ 12 - 0
apps/oozie/src/oozie/models2_tests.py

@@ -1012,6 +1012,18 @@ class TestModelAPI(OozieMockBase):
     assert_equal(len(_data['workflow']['nodes']), 4)
 
 
+  def test_gen_pig_document(self):
+    notebook = make_notebook(name='Browse', editor_type='pig', statement='ls', status='ready')
+    notebook_doc, save_as = _save_notebook(notebook.get_data(), self.user)
+
+    workflow_doc = WorkflowBuilder().create_workflow(document=notebook_doc, user=self.user, managed=True)
+
+    workflow = Workflow(document=workflow_doc, user=self.user)
+
+    _data = workflow.get_data()
+    assert_equal(len(_data['workflow']['nodes']), 5)
+
+
   def test_gen_workflow_from_notebook(self):
     snippets = [
       {

+ 1 - 0
apps/oozie/src/oozie/templates/editor2/gen/workflow-pig-document.xml.mako

@@ -0,0 +1 @@
+workflow-pig.xml.mako

+ 1 - 1
desktop/conf.dist/hue.ini

@@ -664,7 +664,7 @@
 
     [[[pig]]]
       name=Pig
-      interface=pig
+      interface=oozie
 
     [[[text]]]
       name=Text

+ 1 - 1
desktop/conf/pseudo-distributed.ini.tmpl

@@ -672,7 +672,7 @@
 
     [[[pig]]]
       name=Pig
-      interface=pig
+      interface=oozie
 
     [[[text]]]
       name=Text

+ 7 - 0
desktop/libs/liboozie/src/liboozie/submission2.py

@@ -224,6 +224,13 @@ class Submission(object):
               paths.append(self.properties['oozie.libpath'])
             self.properties['oozie.libpath'] = ','.join(paths)
 
+        elif action.data['type'] == 'pig-document':
+          from notebook.models import Notebook
+          notebook = Notebook(document=Document2.objects.get_by_uuid(user=self.user, uuid=action.data['properties']['uuid']))
+          statements = notebook.get_data()['snippets'][0]['statement_raw']
+
+          self._create_file(deployment_dir, action.data['name'] + '.pig', statements)
+
     oozie_xml = self.job.to_xml(self.properties)
     self._do_as(self.user.username, self._copy_files, deployment_dir, oozie_xml, self.properties)
 

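A hedged sketch of what the new 'pig-document' branch does at submission time: the snippet's raw statement is materialized as '<action name>.pig' in the workflow deployment directory, matching the script_path the generated workflow references via ${wf:appPath()}. The helper below is a local-filesystem stand-in for Submission._create_file(), which actually writes to HDFS; its name is illustrative.

  import os

  def write_pig_script(deployment_dir, action_name, statement_raw):
    # Stand-in for self._create_file(deployment_dir, action.data['name'] + '.pig', statements)
    path = os.path.join(deployment_dir, action_name + '.pig')
    with open(path, 'w') as f:
      f.write(statement_raw)
    return path
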
+ 1 - 1
desktop/libs/notebook/src/notebook/conf.py

@@ -163,7 +163,7 @@ def _default_interpreters():
           'name': 'Spark Submit Python', 'interface': 'livy-batch', 'options': {}
       }),
       ('pig', {
-          'name': 'Pig', 'interface': 'pig', 'options': {}
+          'name': 'Pig', 'interface': 'oozie', 'options': {}
       }),
       ('solr', {
           'name': 'Solr SQL', 'interface': 'solr', 'options': {}

+ 1 - 2
desktop/libs/notebook/src/notebook/connectors/base.py

@@ -91,7 +91,6 @@ def get_api(request, snippet):
   from notebook.connectors.jdbc import JdbcApi
   from notebook.connectors.rdbms import RdbmsApi
   from notebook.connectors.oozie_batch import OozieApi
-  from notebook.connectors.pig_batch import PigApi
   from notebook.connectors.solr import SolrApi
   from notebook.connectors.spark_shell import SparkApi
   from notebook.connectors.spark_batch import SparkBatchApi
@@ -123,7 +122,7 @@ def get_api(request, snippet):
   elif interface == 'solr':
     return SolrApi(request.user, interpreter=interpreter)
   elif interface == 'pig':
-    return PigApi(user=request.user, request=request)
+    return OozieApi(user=request.user, request=request) # Backward compatibility until Hue 4
   else:
     raise PopupException(_('Notebook connector interface not recognized: %s') % interface)
 

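A rough sketch (simplified from get_api() above) of the resulting dispatch; the helper name and string return values are illustrative, get_api() returns API instances:

  def resolve_api(interface):
    if interface in ('oozie', 'pig'):  # 'pig' kept as an alias until Hue 4
      return 'OozieApi'
    raise ValueError('Notebook connector interface not recognized: %s' % interface)

  assert resolve_api('pig') == resolve_api('oozie')
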
+ 7 - 1
desktop/libs/notebook/src/notebook/connectors/oozie_batch.py

@@ -47,6 +47,7 @@ class OozieApi(Api):
   LOG_END_PATTERN = '<<< Invocation of Main class completed <<<'
   RESULTS_PATTERN = "(?P<results>>>> Invoking Beeline command line now >>>.+<<< Invocation of Beeline command completed <<<)"
   RESULTS_PATTERN_GENERIC = "(?P<results>>>> Invoking Main class now >>>.+<<< Invocation of Main class completed <<<)"
+  RESULTS_PATTERN_PIG = "(?P<results>>>> Invoking Pig command line now >>>.+<<< Invocation of Pig command completed <<<)"
 
   def __init__(self, *args, **kwargs):
     Api.__init__(self, *args, **kwargs)
@@ -192,7 +193,12 @@ class OozieApi(Api):
   def _get_results(self, log_output, action_type):
     results = ''
 
-    pattern = self.RESULTS_PATTERN if action_type == 'hive' else self.RESULTS_PATTERN_GENERIC
+    if action_type == 'hive':
+      pattern = self.RESULTS_PATTERN
+    elif action_type == 'pig':
+      pattern = self.RESULTS_PATTERN_PIG
+    else:
+      pattern = self.RESULTS_PATTERN_GENERIC
 
     re_results = re.compile(pattern, re.M | re.DOTALL)
     if re_results.search(log_output):

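A small runnable sketch of how RESULTS_PATTERN_PIG is applied by _get_results() above; the pattern is verbatim from the patch, the log text is illustrative:

  import re

  RESULTS_PATTERN_PIG = "(?P<results>>>> Invoking Pig command line now >>>.+<<< Invocation of Pig command completed <<<)"

  log_output = (
    "...launcher output...\n"
    ">>> Invoking Pig command line now >>>\n"
    "(hello,world)\n"
    "<<< Invocation of Pig command completed <<<\n"
  )

  match = re.compile(RESULTS_PATTERN_PIG, re.M | re.DOTALL).search(log_output)
  if match:
    print(match.group('results').strip())
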
+ 0 - 176
desktop/libs/notebook/src/notebook/connectors/pig_batch.py

@@ -1,176 +0,0 @@
-#!/usr/bin/env python
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import json
-import re
-import time
-
-from django.core.urlresolvers import reverse
-from django.http import QueryDict
-from django.utils.translation import ugettext as _
-
-from notebook.connectors.base import Api, QueryError
-
-
-LOG = logging.getLogger(__name__)
-
-
-try:
-  from pig import api
-  from pig.models import PigScript2, get_workflow_output, hdfs_link
-  from oozie.views.api import get_log as get_workflow_logs
-  from oozie.views.dashboard import check_job_access_permission, check_job_edition_permission
-except Exception, e:
-  LOG.exception('Pig application is not enabled: %s' % e)
-
-
-class PigApi(Api):
-
-  RESULTS_PATTERN = "(?P<results>>>> Invoking Pig command line now >>>.+<<< Invocation of Pig command completed <<<)"
-
-  def __init__(self, *args, **kwargs):
-    Api.__init__(self, *args, **kwargs)
-
-    self.fs = self.request.fs
-    self.jt = self.request.jt
-
-
-  def execute(self, notebook, snippet):
-
-    attrs = {
-      'script': snippet['statement'],
-      'name': snippet['properties'].get('name', 'Pig Snippet'),
-      'parameters': snippet['properties'].get('parameters'),
-      'resources': snippet['properties'].get('resources'),
-      'hadoopProperties': snippet['properties'].get('hadoopProperties')
-    }
-
-    pig_script = PigScript2(attrs)
-
-    params = json.dumps([])
-    oozie_id = api.get(self.fs, self.jt, self.user).submit(pig_script, params)
-
-    return {
-      'id': oozie_id,
-      'watchUrl': reverse('pig:watch', kwargs={'job_id': oozie_id}) + '?format=python',
-      'has_result_set': True,
-    }
-
-
-  def check_status(self, notebook, snippet):
-    job_id = snippet['result']['handle']['id']
-
-    oozie_workflow = check_job_access_permission(self.request, job_id)
-    logs, workflow_actions, is_really_done = self._get_log_output(oozie_workflow)
-    results = self._get_results(logs)
-
-    if is_really_done and not oozie_workflow.is_running():
-      if oozie_workflow.status in ('KILLED', 'FAILED'):
-        raise QueryError(_('The script failed to run and was stopped'))
-      if results:
-        status = 'available'
-      else:
-        status = 'running' # Tricky case when the logs are being moved by YARN at job completion
-    elif oozie_workflow.is_running():
-      status = 'running'
-    else:
-      status = 'failed'
-
-    return {
-        'status': status
-    }
-
-
-  def fetch_result(self, notebook, snippet, rows, start_over):
-    job_id = snippet['result']['handle']['id']
-
-    log_output = self.get_log(notebook, snippet)
-    results = self._get_results(log_output)
-
-    return {
-        'data':  [[line] for line in results.split('\n')], # hdfs_link()
-        'meta': [{'name': 'Header', 'type': 'STRING_TYPE', 'comment': ''}],
-        'type': 'table',
-        'has_more': False,
-    }
-
-
-  def cancel(self, notebook, snippet):
-    job_id = snippet['result']['handle']['id']
-
-    job = check_job_access_permission(self, job_id)
-    check_job_edition_permission(job, self.user)
-
-    api.get(self.fs, self.jt, self.user).stop(job_id)
-
-    return {'status': 0}
-
-
-  def get_log(self, notebook, snippet, startFrom=0, size=None):
-    job_id = snippet['result']['handle']['id']
-
-    oozie_workflow = check_job_access_permission(self.request, job_id)
-    logs, workflow_actions, is_really_done = self._get_log_output(oozie_workflow)
-    return logs
-
-
-  def progress(self, snippet, logs):
-    job_id = snippet['result']['handle']['id']
-
-    oozie_workflow = check_job_access_permission(self.request, job_id)
-    return oozie_workflow.get_progress(),
-
-
-  def close_statement(self, snippet):
-    pass
-
-
-  def close_session(self, session):
-    pass
-
-
-  def _get_log_output(self, oozie_workflow):
-    log_output = ''
-
-    q = QueryDict(self.request.GET, mutable=True)
-    q['format'] = 'python'  # Hack for triggering the good section in single_task_attempt_logs
-    self.request.GET = q
-
-    logs, workflow_actions, is_really_done = api.get(self.fs, self.jt, self.user).get_log(self.request, oozie_workflow,
-                                                                                          make_links=False)
-
-    if len(logs) > 0:
-      log_output = logs.values()[0]
-      if log_output.startswith('Unable to locate'):
-        LOG.debug('Failed to get job attempt logs, possibly due to YARN archiving job to JHS. Will sleep and try again.')
-        time.sleep(5.0)
-        logs, workflow_actions, is_really_done = api.get(self.fs, self.jt, self.user).get_log(self.request, oozie_workflow,
-                                                                                              make_links=False)
-        if len(logs) > 0:
-          log_output = logs.values()[0]
-
-    return log_output, workflow_actions, is_really_done
-
-
-  def _get_results(self, log_output):
-    results = ''
-    re_results = re.compile(self.RESULTS_PATTERN, re.M | re.DOTALL)
-    if re_results.search(log_output):
-      results = re.search(re_results, log_output).group('results').strip()
-    return results
-