Browse Source

HUE-546. jobsubd should use a single file for Hadoop delegation tokens when submitting jobs

Aaron T. Myers 14 years ago
parent
commit
9677859135

+ 55 - 43
apps/jobsub/src/jobsub/server.py

@@ -236,52 +236,64 @@ class PlanRunner(object):
      'LANG': os.getenv('LANG', i18n.get_site_encoding()),
    }

-    delegation_token_files = []
    all_clusters = []
    all_clusters += all_mrclusters().values()
    all_clusters += get_all_hdfs().values()
-    LOG.info("all_clusters: %s" % (repr(all_clusters),))
-    for cluster in all_clusters:
-      if cluster.security_enabled:
-        cluster.setuser(self.plan.user)
-        token = cluster.get_delegation_token()
-        token_file = tempfile.NamedTemporaryFile()
-        token_file.write(token.delegationTokenBytes)
-        token_file.flush()
-        delegation_token_files.append(token_file)
-
-    if delegation_token_files:
-      env['HADOOP_TOKEN_FILE_LOCATION'] = ','.join([token_file.name for token_file in delegation_token_files])
-
-    java_home = os.getenv('JAVA_HOME')
-    if java_home:
-      env["JAVA_HOME"] = java_home
-    for k, v in env.iteritems():
-      assert v is not None, "Environment key %s missing value." % k
-
-    args = [ hadoop.conf.HADOOP_BIN.get() ]
-    if hadoop.conf.HADOOP_CONF_DIR.get():
-      args.append("--config")
-      args.append(hadoop.conf.HADOOP_CONF_DIR.get())
-
-    args += step.arguments
-    LOG.info("Starting %s.  (Env: %s)", repr(args), repr(env))
-    LOG.info("Running: %s" % " ".join(args))
-    self.pipe = subprocess.Popen(
-      args,
-      stdin=None,
-      cwd=self.work_dir,
-      stdout=self.stdout,
-      stderr=self.stderr,
-      shell=False,
-      close_fds=True,
-      env=env)
-    retcode = self.pipe.wait()
-    if 0 != retcode:
-      raise Exception("bin/hadoop returned non-zero %d" % retcode)
-    LOG.info("bin/hadoop returned %d" % retcode)
-    for token_file in delegation_token_files:
-      token_file.close()
+    delegation_token_files = []
+    merged_token_file = tempfile.NamedTemporaryFile()
+    try:
+      LOG.debug("all_clusters: %s" % (repr(all_clusters),))
+      for cluster in all_clusters:
+        if cluster.security_enabled:
+          cluster.setuser(self.plan.user)
+          token = cluster.get_delegation_token()
+          token_file = tempfile.NamedTemporaryFile()
+          token_file.write(token.delegationTokenBytes)
+          token_file.flush()
+          delegation_token_files.append(token_file)
+  
+      java_home = os.getenv('JAVA_HOME')
+      if java_home:
+        env["JAVA_HOME"] = java_home
+      for k, v in env.iteritems():
+        assert v is not None, "Environment key %s missing value." % k
+  
+      base_args = [ hadoop.conf.HADOOP_BIN.get() ]
+      if hadoop.conf.HADOOP_CONF_DIR.get():
+        base_args.append("--config")
+        base_args.append(hadoop.conf.HADOOP_CONF_DIR.get())
+  
+      if delegation_token_files:
+        args = list(base_args) # Make a copy of the base args.
+        args += ['jar', hadoop.conf.CREDENTIALS_MERGER_JAR.get(), merged_token_file.name]
+        args += [token_file.name for token_file in delegation_token_files]
+        LOG.debug("merging credentials files with command: '%s'" % (' '.join(args),))
+        merge_pipe = subprocess.Popen(args, shell=False, close_fds=True)
+        retcode = merge_pipe.wait()
+        if 0 != retcode:
+          raise Exception("bin/hadoop returned non-zero %d while trying to merge credentials" % (retcode,))
+        env['HADOOP_TOKEN_FILE_LOCATION'] = merged_token_file.name
+  
+      args = list(base_args) # Make a copy of the base args.
+      args += step.arguments
+      LOG.info("Starting %s.  (Env: %s)", repr(args), repr(env))
+      LOG.info("Running: %s" % " ".join(args))
+      self.pipe = subprocess.Popen(
+        args,
+        stdin=None,
+        cwd=self.work_dir,
+        stdout=self.stdout,
+        stderr=self.stderr,
+        shell=False,
+        close_fds=True,
+        env=env)
+      retcode = self.pipe.wait()
+      if 0 != retcode:
+        raise Exception("bin/hadoop returned non-zero %d" % retcode)
+      LOG.info("bin/hadoop returned %d" % retcode)
+    finally:
+      for token_file in delegation_token_files + [merged_token_file]:
+        token_file.close()

class JobSubmissionServiceImpl(object):
  @coerce_exceptions
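
Note on the server.py change above: Hadoop reads HADOOP_TOKEN_FILE_LOCATION as a single path, so the old comma-joined list of per-cluster token files could not work with more than one secure cluster. The runner now shells out to the new credentials-merger jar first and points the child process at the one merged file. A minimal sketch of that invocation, with hypothetical values standing in for HADOOP_BIN.get(), CREDENTIALS_MERGER_JAR.get(), and the per-cluster token files:

    import subprocess
    import tempfile

    merged_token_file = tempfile.NamedTemporaryFile()   # single output file; closed in the finally block
    token_paths = ['/tmp/hdfs.token', '/tmp/mr.token']  # hypothetical per-cluster token files
    args = ['/usr/lib/hadoop/bin/hadoop', 'jar',        # hypothetical HADOOP_BIN
            '/opt/hue/credentials-merger.jar',          # hypothetical jar location
            merged_token_file.name] + token_paths
    if subprocess.Popen(args, shell=False, close_fds=True).wait() != 0:
        raise Exception("credentials merge failed")
    env = {'HADOOP_TOKEN_FILE_LOCATION': merged_token_file.name}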

+ 23 - 1
desktop/libs/hadoop/Makefile

@@ -57,7 +57,13 @@ SUDO_SHELL_BUILD := $(SUDO_SHELL_DIR)/target
SUDO_SHELL_LIB := $(SUDO_SHELL_DIR)/java-lib
SUDO_SHELL := $(SUDO_SHELL_LIB)/sudo-shell-$(DESKTOP_VERSION).jar

-compile: $(PLUGIN) $(STATIC_GROUP_MAPPING) $(SUDO_SHELL)
+CREDENTIALS_MERGER_DIR := $(realpath .)/credentials-merger
+CREDENTIALS_MERGER_SRC := $(CREDENTIALS_MERGER_DIR)/src
+CREDENTIALS_MERGER_BUILD := $(CREDENTIALS_MERGER_DIR)/target
+CREDENTIALS_MERGER_LIB := $(CREDENTIALS_MERGER_DIR)/java-lib
+CREDENTIALS_MERGER := $(CREDENTIALS_MERGER_LIB)/credentials-merger-$(DESKTOP_VERSION).jar
+
+compile: $(PLUGIN) $(STATIC_GROUP_MAPPING) $(SUDO_SHELL) $(CREDENTIALS_MERGER)

# Build the plugin jar iff the source is present
ifneq (,$(wildcard $(PLUGIN_JAVA_DIR)))
@@ -107,6 +113,22 @@ $(SUDO_SHELL):
	$(error cannot build sudo-shell jar without source)
endif

+# Build the credentials merger jar iff the source is present
+ifneq (,$(wildcard $(CREDENTIALS_MERGER_SRC)))
+$(CREDENTIALS_MERGER): $(shell find $(CREDENTIALS_MERGER_SRC) -type f)
+	mkdir -p $(CREDENTIALS_MERGER_LIB)
+	@echo "--- Building credentials merger tool"
+	cd $(CREDENTIALS_MERGER_DIR) && mvn clean install -DskipTests
+	cp $(CREDENTIALS_MERGER_BUILD)/credentials-merger-$(MAVEN_VERSION).jar $(CREDENTIALS_MERGER)
+
+clean::
+	rm -rf $(CREDENTIALS_MERGER_LIB)
+	cd $(CREDENTIALS_MERGER_DIR) && mvn clean ||:
+else
+$(CREDENTIALS_MERGER):
+	$(error cannot build credentials merger jar without source)
+endif
+
#
# Tell the `bdist' target to exclude our java source.
#
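
The new jar lands in a java-lib/ directory alongside the sudo-shell and plugin jars, which is the layout the find_jar glob in hadoop/conf.py (below) depends on. A quick sanity check that the build put the artifact where the glob expects it, assuming it is run from desktop/libs/hadoop:

    import glob
    import os

    # The same pattern conf.py's find_jar resolves, relative to desktop/libs/hadoop.
    pattern = os.path.join('credentials-merger', 'java-lib', 'credentials-merger-*.jar')
    jars = glob.glob(pattern)
    assert jars, "credentials-merger jar missing; run `make compile` in desktop/libs/hadoop"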

+ 72 - 0
desktop/libs/hadoop/credentials-merger/pom.xml

@@ -0,0 +1,72 @@
+<?xml version="1.0"?>
+
+<!--
+   Licensed to Cloudera, Inc. under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<project>
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>com.cloudera.hue</groupId>
+    <artifactId>hue-parent</artifactId>
+    <version>1.2.0-SNAPSHOT</version>
+  </parent>
+
+  <groupId>com.cloudera.hue</groupId>
+  <artifactId>credentials-merger</artifactId>
+  <packaging>jar</packaging>
+
+  <name>Hue Credentials File Merger Utility</name>
+
+  <scm>
+   <connection>scm:git:git://github.com/cloudera/hue.git</connection>
+   <developerConnection>scm:git:git@github.com:cloudera/hue.git</developerConnection>
+   <url>https://github.com/cloudera/hue</url>
+  </scm>  
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-core</artifactId>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <configuration>
+          <source>1.6</source>
+          <target>1.6</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <archive>
+            <manifest>
+              <mainClass>com.cloudera.hue.CredentialsMerger</mainClass>
+            </manifest>
+          </archive>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
+      

+ 64 - 0
desktop/libs/hadoop/credentials-merger/src/main/java/com/cloudera/hue/CredentialsMerger.java

@@ -0,0 +1,64 @@
+// Licensed to Cloudera, Inc. under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  Cloudera, Inc. licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package com.cloudera.hue;
+
+import java.io.IOException;
+import java.io.File;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.Credentials;
+
+/**
+ * A tool to merge the credentials of multiple distinct files containing Hadoop
+ * delegation tokens into a single file.
+ */
+public class CredentialsMerger {
+  
+  /**
+   * Merge several credentials files into one. Give the desired output file
+   * first, followed by all of the input files.
+   *
+   * @param args &lt;out&gt; &lt;in1&gt; ...
+   * @throws IOException  in the event of an error reading or writing files.
+   */
+  public static void main(String[] args) throws IOException {
+    if (args.length < 2) {
+      printUsage();
+      System.exit(1);
+    }
+
+    Path outputFile = new Path("file://" + new File(args[0]).getAbsolutePath());
+    Configuration conf = new Configuration();
+    Credentials credentials = new Credentials();
+    for (int i = 1; i < args.length; i++) {
+      Credentials singleFileCredentials = Credentials.readTokenStorageFile(
+          new Path("file://" + new File(args[i]).getAbsolutePath()), conf);
+      credentials.addAll(singleFileCredentials);
+    }
+
+    credentials.writeTokenStorageFile(outputFile, conf);
+  }
+  
+  /**
+   * Show command usage.
+   */
+  private static void printUsage() {
+    System.err.println("Usage: " + CredentialsMerger.class.getCanonicalName()
+        + " <dst> <src> ...");
+  }
+
+}
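
Because the pom's jar manifest names CredentialsMerger as the Main-Class, the tool can be driven through `hadoop jar` exactly as server.py does, which also supplies the hadoop-core classes on the classpath. An illustrative invocation (jar name and token paths are made up):

    import subprocess

    subprocess.check_call([
        'hadoop', 'jar', 'credentials-merger-1.2.0-SNAPSHOT.jar',
        '/tmp/merged.token',               # <dst>: written with Credentials.writeTokenStorageFile()
        '/tmp/nn.token', '/tmp/jt.token',  # <src> ...: each read with Credentials.readTokenStorageFile()
    ])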

+ 7 - 0
desktop/libs/hadoop/src/hadoop/conf.py

@@ -111,6 +111,13 @@ SUDO_SHELL_JAR = Config("hadoop_sudo_shell_jar",
                           root=os.path.dirname(__file__)),
  private=True)

+CREDENTIALS_MERGER_JAR = Config("hadoop_credentials_merger_jar",
+  help="Tool that is capable of merging multiple files containing delegation tokens into one.",
+  type=str,
+  dynamic_default=find_jar("../../credentials-merger/java-lib/credentials-merger-*.jar",
+                           root=os.path.dirname(__file__)),
+  private=True)
+
HDFS_CLUSTERS = UnspecifiedConfigSection(
  "hdfs_clusters",
  help="One entry for each HDFS cluster",
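
At runtime server.py resolves the tool through this Config: the dynamic default globs for the versioned jar the Makefile copied into java-lib/, and the private hadoop_credentials_merger_jar key can override it. A sketch of the lookup, assuming a Hue environment where the hadoop lib is importable:

    import hadoop.conf

    # Resolves to .../credentials-merger/java-lib/credentials-merger-<version>.jar
    # unless hadoop_credentials_merger_jar is set in the configuration.
    jar_path = hadoop.conf.CREDENTIALS_MERGER_JAR.get()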