
HUE-1452 [oozie] Support workflow credentials

Parameter values are inserted at submission time.
The correct XML is generated in the workflow and Hive action.
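
For illustration, the Hive action element picks up a cred attribute pointing at the credentials block (the node name 'MyHive' is just the example used in the tests):

    <action name="MyHive" cred='hive_credentials'>
        <hive xmlns="uri:oozie:hive-action:0.2">
            ...
        </hive>
    </action>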

Hue reads the values through its local hive-site.xml:
_CNF_METASTORE_SASL = 'hive.metastore.sasl.enabled'
_CNF_METASTORE_URIS = 'hive.metastore.uris'
_CNF_METASTORE_KERBEROS_PRINCIPAL = 'hive.metastore.kerberos.principal'
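
These correspond to properties like the following in hive-site.xml (values are illustrative, taken from the test fixture below):

    <property>
      <name>hive.metastore.sasl.enabled</name>
      <value>true</value>
    </property>
    <property>
      <name>hive.metastore.uris</name>
      <value>thrift://darkside-1234:9999</value>
    </property>
    <property>
      <name>hive.metastore.kerberos.principal</name>
      <value>hive/_HOST@test.com</value>
    </property>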

The credential names are hardcoded for now:
<credential name='hive_credentials' type='hcat'>
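
At submission time the full block generated in workflow.xml looks like this (values are illustrative, matching the test fixture):

    <credentials>
      <credential name='hive_credentials' type='hcat'>
        <property>
          <name>hcat.metastore.uri</name>
          <value>thrift://darkside-1234:9999</value>
        </property>
        <property>
          <name>hcat.metastore.principal</name>
          <value>hive/darkside-1234@test.com</value>
        </property>
      </credential>
    </credentials>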

Tests added for hive-site.xml parsing and XML workflow generation.
Romain Rigaux, 12 years ago
Commit 9d72726

+ 37 - 0
apps/beeswax/src/beeswax/hive_site.py

@@ -14,6 +14,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 """
 Helper for reading hive-site.xml
 """
@@ -22,6 +23,7 @@ import errno
 import logging
 import os.path
 import re
+import socket
 
 from desktop.lib import security_util
 
@@ -35,6 +37,10 @@ _HIVE_SITE_PATH = None                  # Path to hive-site.xml
 _HIVE_SITE_DICT = None                  # A dictionary of name/value config options
 _METASTORE_LOC_CACHE = None
 
+_CNF_METASTORE_SASL = 'hive.metastore.sasl.enabled'
+_CNF_METASTORE_URIS = 'hive.metastore.uris'
+_CNF_METASTORE_KERBEROS_PRINCIPAL = 'hive.metastore.kerberos.principal'
+
 _CNF_HIVESERVER2_KERBEROS_PRINCIPAL = 'hive.server2.authentication.kerberos.principal'
 _CNF_HIVESERVER2_AUTHENTICATION = 'hive.server2.authentication'
 _CNF_HIVESERVER2_IMPERSONATION = 'hive.server2.enable.doAs'
@@ -61,6 +67,37 @@ def get_conf():
     _parse_hive_site()
   return _HIVE_SITE_DICT
 
+
+def get_metastore():
+  """
+  Get first metastore information from local hive-site.xml.
+  """
+  global _METASTORE_LOC_CACHE
+  if not _METASTORE_LOC_CACHE:
+    thrift_uris = get_conf().get(_CNF_METASTORE_URIS)
+    is_local = thrift_uris is None or thrift_uris == ''
+
+    if not is_local:
+      use_sasl = str(get_conf().get(_CNF_METASTORE_SASL, 'false')).lower() == 'true'
+      thrift_uri = thrift_uris.split(",")[0] # First URI
+      host = socket.getfqdn()
+      match = _THRIFT_URI_RE.match(thrift_uri)
+      if not match:
+        LOG.error('Cannot understand remote metastore uri "%s"' % thrift_uri)
+      else:
+        host, port = match.groups()
+      kerberos_principal = security_util.get_kerberos_principal(get_conf().get(_CNF_METASTORE_KERBEROS_PRINCIPAL, None), host)
+
+      _METASTORE_LOC_CACHE = {
+          'use_sasl': use_sasl,
+          'thrift_uri': thrift_uri,
+          'kerberos_principal': kerberos_principal
+      }
+    else:
+      LOG.error('Hue requires a remote metastore configuration')
+  return _METASTORE_LOC_CACHE
+
+
 def get_hiveserver2_kerberos_principal(hostname_or_ip):
   """
   Retrieves principal for HiveServer 2.

+ 32 - 3
apps/beeswax/src/beeswax/tests.py

@@ -60,6 +60,7 @@ from beeswax.server.dbms import QueryServerException
 from beeswax.server.hive_server2_lib import HiveServerClient,\
   PartitionValueCompatible, HiveServerTable
 from beeswax.test_base import BeeswaxSampleProvider
+from beeswax.hive_site import get_metastore
 
 
 
@@ -1334,7 +1335,7 @@ def test_hive_site():
 
     assert_equal(beeswax.hive_site.get_conf()['hive.metastore.warehouse.dir'], u'/abc')
     assert_equal(beeswax.hive_site.get_hiveserver2_kerberos_principal('localhost'), 'hs2test/test.com@TEST.COM')
-    assert_equal(beeswax.hive_site.get_hiveserver2_authentication(), 'NONE')
+    assert_equal(beeswax.hive_site.get_hiveserver2_authentication(), 'NOSASL')
   finally:
     beeswax.hive_site.reset()
     if saved is not None:
@@ -1393,7 +1394,7 @@ def test_hive_site_null_hs2krb():
 
     assert_equal(beeswax.hive_site.get_conf()['hive.metastore.warehouse.dir'], u'/abc')
     assert_equal(beeswax.hive_site.get_hiveserver2_kerberos_principal('localhost'), None)
-    assert_equal(beeswax.hive_site.get_hiveserver2_authentication(), 'NONE')
+    assert_equal(beeswax.hive_site.get_hiveserver2_authentication(), 'NOSASL')
   finally:
     beeswax.hive_site.reset()
     if saved is not None:
@@ -1679,6 +1680,34 @@ def test_hiveserver2_get_security():
       hive_site._HIVE_SITE_DICT.pop(hive_site._CNF_HIVESERVER2_AUTHENTICATION, None)
 
 
+def test_metastore_security():
+  tmpdir = tempfile.mkdtemp()
+  saved = None
+  try:
+    # We just replace the Beeswax conf variable
+    class Getter(object):
+      def get(self):
+        return tmpdir
+
+    xml = hive_site_xml(is_local=False, use_sasl=True, kerberos_principal='hive/_HOST@test.com')
+    file(os.path.join(tmpdir, 'hive-site.xml'), 'w').write(xml)
+
+    beeswax.hive_site.reset()
+    saved = beeswax.conf.HIVE_CONF_DIR
+    beeswax.conf.HIVE_CONF_DIR = Getter()
+
+    metastore = get_metastore()
+
+    assert_true(metastore['use_sasl'])
+    assert_equal('thrift://darkside-1234:9999', metastore['thrift_uri'])
+    assert_equal('hive/darkside-1234@test.com', metastore['kerberos_principal'])
+  finally:
+    beeswax.hive_site.reset()
+    if saved is not None:
+      beeswax.conf.HIVE_CONF_DIR = saved
+    shutil.rmtree(tmpdir)
+
+
 def hive_site_xml(is_local=False, use_sasl=False, thrift_uris='thrift://darkside-1234:9999',
                   warehouse_dir='/abc', kerberos_principal='test/test.com@TEST.COM',
                   hs2_kerberos_principal='hs2test/test.com@TEST.COM',
@@ -1724,7 +1753,7 @@ def hive_site_xml(is_local=False, use_sasl=False, thrift_uris='thrift://darkside
       </property>
 
       <property>
-        <name>hive.metastore.sasl.enabled</name>
+        <name>hive.server2.authentication</name>
         <value>%(hs2_authentication)s</value>
       </property>
 

+ 3 - 1
apps/oozie/src/oozie/models.py

@@ -45,9 +45,11 @@ from desktop.models import Document
 from hadoop.fs.exceptions import WebHdfsException
 
 from hadoop.fs.hadoopfs import Hdfs
+from liboozie.conf import SECURITY_ENABLED
 from liboozie.submittion import Submission
 from liboozie.submittion import create_directories
 
+
 from oozie.conf import REMOTE_SAMPLE_DIR
 from oozie.utils import utc_datetime_format
 from oozie.timezones import TIMEZONES
@@ -464,7 +466,7 @@ class Workflow(Job):
     controls = oozie_workflow.get_control_flow_actions()
     WorkflowFormSet = inlineformset_factory(Workflow, Node, form=NodeMetaForm, max_num=0, can_order=False, can_delete=False)
     forms = WorkflowFormSet(instance=self).forms
-    template='editor/gen/workflow-graph-status.xml.mako'
+    template = 'editor/gen/workflow-graph-status.xml.mako'
 
     index = dict([(form.instance.id, form) for form in forms])
     actions_index = dict([(action.name, action) for action in actions])

+ 1 - 1
apps/oozie/src/oozie/templates/editor/gen/workflow-hive.xml.mako

@@ -17,7 +17,7 @@
 
 <%namespace name="common" file="workflow-common.xml.mako" />
 
-    <action name="${ node }">
+    <action name="${ node }"${ " cred='hive_credentials'" if mapping.get('is_kerberized_hive') else '' | n,unicode}>
         <hive xmlns="uri:oozie:hive-action:0.2">
             <job-tracker>${'${'}jobTracker}</job-tracker>
             <name-node>${'${'}nameNode}</name-node>

+ 14 - 0
apps/oozie/src/oozie/templates/editor/gen/workflow.xml.mako

@@ -29,6 +29,20 @@
     % endif
   </global>
   % endif
+  % if mapping.get('is_kerberized_hive'):
+  <credentials>
+    <credential name='hive_credentials' type='${ mapping['credential_type'] }'>
+      <property>
+        <name>hcat.metastore.uri</name>
+        <value>${ mapping['thrift_server'] }</value>
+      </property>
+      <property>
+        <name>hcat.metastore.principal</name>
+        <value>${ mapping['hive_principal'] }</value>
+      </property>
+    </credential>
+   </credentials>
+  % endif
   % for node in workflow.node_list:
       ${ node.to_xml(mapping) | n }
   % endfor

+ 128 - 1
apps/oozie/src/oozie/tests.py

@@ -21,6 +21,8 @@ import logging
 import re
 import os
 import StringIO
+import shutil
+import tempfile
 import zipfile
 
 from itertools import chain
@@ -48,7 +50,6 @@ from oozie.importlib.workflows import import_workflow
 from oozie.importlib.jobdesigner import convert_jobsub_design
 
 
-
 LOG = logging.getLogger(__name__)
 
 
@@ -1130,6 +1131,132 @@ class TestEditor(OozieMockBase):
     </action>""" in xml, xml)
 
 
+  def test_workflow_hive_gen_xml(self):
+    self.wf.node_set.filter(name='action-name-1').delete()
+
+    action1 = add_node(self.wf, 'action-name-1', 'hive', [self.wf.start], {
+        u'job_xml': 'my-job.xml',
+        u'files': '["hello.py"]',
+        u'name': 'MyHive',
+        u'job_properties': '[]',
+        u'script_path': 'hello.sql',
+        u'archives': '[]',
+        u'prepares': '[]',
+        u'params': '[{"value":"World!","type":"argument"}]',
+        u'description': ''
+    })
+    Link(parent=action1, child=self.wf.end, name="ok").save()
+
+    xml = self.wf.to_xml()
+
+    assert_true("""
+<workflow-app name="wf-name-1" xmlns="uri:oozie:workflow:0.4">
+  <global>
+      <job-xml>jobconf.xml</job-xml>
+            <configuration>
+                <property>
+                    <name>sleep-all</name>
+                    <value>${SLEEP}</value>
+                </property>
+            </configuration>
+  </global>
+    <start to="MyHive"/>
+    <action name="MyHive">
+        <hive xmlns="uri:oozie:hive-action:0.2">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+              <job-xml>my-job.xml</job-xml>
+            <script>hello.sql</script>
+              <argument>World!</argument>
+            <file>hello.py#hello.py</file>
+        </hive>
+        <ok to="end"/>
+        <error to="kill"/>
+    </action>
+    <kill name="kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>""" in xml, xml)
+
+    import beeswax
+    from beeswax.tests import hive_site_xml
+
+    tmpdir = tempfile.mkdtemp()
+    saved = None
+    try:
+      # We just replace the Beeswax conf variable
+      class Getter(object):
+        def get(self):
+          return tmpdir
+
+      xml = hive_site_xml(is_local=False, use_sasl=True, kerberos_principal='hive/_HOST@test.com')
+      file(os.path.join(tmpdir, 'hive-site.xml'), 'w').write(xml)
+
+      beeswax.hive_site.reset()
+      saved = beeswax.conf.HIVE_CONF_DIR
+      beeswax.conf.HIVE_CONF_DIR = Getter()
+
+      xml = self.wf.to_xml(mapping={
+         'is_kerberized_hive': True,
+         'credential_type': 'hcat',
+         'thrift_server': 'thrift://darkside-1234:9999',
+         'hive_principal': 'hive/darkside-1234@test.com'
+      })
+
+      assert_true("""
+<workflow-app name="wf-name-1" xmlns="uri:oozie:workflow:0.4">
+  <global>
+      <job-xml>jobconf.xml</job-xml>
+            <configuration>
+                <property>
+                    <name>sleep-all</name>
+                    <value>${SLEEP}</value>
+                </property>
+            </configuration>
+  </global>
+  <credentials>
+    <credential name='hive_credentials' type='hcat'>
+      <property>
+        <name>hcat.metastore.uri</name>
+        <value>thrift://darkside-1234:9999</value>
+      </property>
+      <property>
+        <name>hcat.metastore.principal</name>
+        <value>hive/darkside-1234@test.com</value>
+      </property>
+    </credential>
+   </credentials>
+    <start to="MyHive"/>
+    <action name="MyHive" cred='hive_credentials'>
+        <hive xmlns="uri:oozie:hive-action:0.2">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+              <job-xml>my-job.xml</job-xml>
+            <script>hello.sql</script>
+              <argument>World!</argument>
+            <file>hello.py#hello.py</file>
+        </hive>
+        <ok to="end"/>
+        <error to="kill"/>
+    </action>
+    <kill name="kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>""" in xml, xml)
+
+    finally:
+      beeswax.hive_site.reset()
+      if saved is not None:
+        beeswax.conf.HIVE_CONF_DIR = saved
+      shutil.rmtree(tmpdir)
+
+
+    self.wf.node_set.filter(name='action-name-1').delete()
+
+
+
   def test_create_coordinator(self):
     create_coordinator(self.wf, self.c, self.user)
 

+ 12 - 0
desktop/libs/liboozie/src/liboozie/submittion.py

@@ -186,6 +186,18 @@ class Submission(object):
         self.job.HUE_ID: self.job.id
       })
 
+    # Even if no Hive action for now
+    from beeswax.hive_site import get_metastore
+    metastore = get_metastore()
+
+    if metastore and metastore.get('use_sasl'):
+      self.properties.update({
+         'is_kerberized_hive': True,
+         'credential_type': 'hcat',
+         'thrift_server': metastore.get('thrift_uri'),
+         'hive_principal': metastore.get('kerberos_principal')
+      })
+
   def _create_deployment_dir(self):
     """
     Return the job deployment directory in HDFS, creating it if necessary.