
[jobsub] Remove Java from jobsub

We're rewriting jobsub to use Oozie later. I'm removing the Java pieces now
so that jobsub builds against CDH4.
bc Wong 13 years ago
parent
commit
a6e2f4d568

+ 0 - 37
apps/jobsub/Makefile

@@ -23,40 +23,3 @@ endif
 APP_NAME = hue-jobsub
 include $(ROOT)/Makefile.sdk
 
-#############################
-# Jobsub specific
-#############################
-
-BLD_DIR_JOBSUB := $(BUILD_DIR)/java
-JOBSUB_JAVA_LIB := $(APP_ROOT)/java-lib
-
-JOBSUB := $(JOBSUB_JAVA_LIB)/trace.jar
-JOBSUB_JAVA_DIR := $(APP_ROOT)/src/jobsub/java
-
-#
-# Compile jobsub
-#
-compile: $(JOBSUB)
-
-clean::
-	rm -Rf $(JOBSUB_JAVA_LIB)
-
-ifneq (,$(wildcard $(JOBSUB_JAVA_DIR)))
-JOBSUB_OPTS := -Daspectj.dir=$(ASPECTJ_DIR) \
-	       -Dbuild.dir=$(BLD_DIR_JOBSUB) \
-	       -Dhadoop.home=$(HADOOP_HOME)
-$(JOBSUB): $(shell find $(JOBSUB_JAVA_DIR))
-	@echo "--- Building Desktop jobsub"
-	cd $(JOBSUB_JAVA_DIR) && ant $(ANT_OPTS) $(JOBSUB_OPTS) jar
-	@mkdir -p $(JOBSUB_JAVA_LIB)
-	@cp $(BLD_DIR_JOBSUB)/*.jar $@
-else
-$(JOBSUB):
-	$(error Cannot build jobsub java without java source)
-endif
-
-#
-# Tell the `bdist' target to exclude our java source.
-#
-BDIST_EXCLUDES += \
-	--exclude=src/jobsub/java

+ 0 - 4
apps/jobsub/regenerate_thrift.sh

@@ -1,4 +0,0 @@
-#!/bin/bash
-
-cd $(dirname $0)
-thrift -r --gen py:new_style -o . src/jobsub/jobsubd.thrift

+ 1 - 2
apps/jobsub/setup.py

@@ -25,6 +25,5 @@ setup(
       packages = find_packages('src'),
       package_dir = {'': 'src'},
       install_requires = ['setuptools', 'desktop'],
-      entry_points = { 'desktop.supervisor.specs': [ 'jobsubd = jobsub:SUPERVISOR_SPEC' ],
-                       'desktop.sdk.application': 'jobsub=jobsub' },
+      entry_points = { 'desktop.sdk.application': 'jobsub=jobsub' },
 )
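
With the 'desktop.supervisor.specs' entry point removed, jobsub is registered only as a Desktop SDK application, so the supervisor no longer launches a jobsubd process for it. As a rough illustration (not code from this commit, and the loader function name here is made up), this is how a setuptools consumer such as Hue's app loader could enumerate what remains registered under the 'desktop.sdk.application' group:

    # Illustrative sketch only -- the real Desktop app-loading code is not part of this diff.
    import pkg_resources

    def discover_sdk_apps():
        """Map entry point names to their loaded modules, e.g. 'jobsub' -> the jobsub package."""
        return dict(
            (ep.name, ep.load())
            for ep in pkg_resources.iter_entry_points('desktop.sdk.application')
        )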

+ 0 - 3
apps/jobsub/src/jobsub/__init__.py

@@ -14,6 +14,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from desktop.supervisor import DjangoCommandSupervisee
-
-SUPERVISOR_SPEC = DjangoCommandSupervisee("jobsubd")

+ 0 - 61
apps/jobsub/src/jobsub/java/build.xml

@@ -1,61 +0,0 @@
-<!--
-  Licensed to Cloudera, Inc. under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  Cloudera, Inc. licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<!-- Ant build file for jobsub -->
-
-<project name="jobsub" default="build" basedir=".">
-  <property name="src.dir" location="src" />
-  <property name="build.dir" location="build" />
-  <property name="aspectj.dir" location="/usr/share/java/" />
-
-  <path id="build.classpath">
-    <fileset dir="${hadoop.home}">
-      <include name="hadoop*core*.jar" />
-      <include name="lib/*.jar" />
-    </fileset>
-    <pathelement location="${aspectj.dir}/aspectjrt.jar" />
-  </path>
-
-  <taskdef 
-      resource="org/aspectj/tools/ant/taskdefs/aspectjTaskdefs.properties">
-    <classpath>
-      <pathelement location="${aspectj.dir}/aspectjtools.jar" />
-    </classpath>
-  </taskdef>
-
-  <target name="init">
-    <mkdir dir="${build.dir}" />
-  </target>
-
-  <target name="build" depends="init">
-    <iajc sourceroots="${src.dir}"
-          destDir="${build.dir}/out" source="1.5">
-      <classpath refid="build.classpath" />
-    </iajc>
-  </target>
-
-  <target name="jar" depends="build">
-    <copy todir="${build.dir}/out">
-      <fileset dir="${src.dir}" includes="META-INF/*" />
-    </copy>
-    <jar destfile="${build.dir}/trace.jar" basedir="${build.dir}/out" />
-  </target>
-
-  <target name="clean">
-    <delete dir="${build.dir}" />
-  </target>
-</project>

+ 0 - 24
apps/jobsub/src/jobsub/java/src/META-INF/aop-ajc.xml

@@ -1,24 +0,0 @@
-<!--
-  Licensed to Cloudera, Inc. under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  Cloudera, Inc. licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<aspectj>
-<aspects>
-<aspect name="org.apache.hadoop.security.JobClientTrace"/>
-<aspect name="org.apache.hadoop.security.UgiFixer"/>
-</aspects>
-</aspectj>
-

+ 0 - 60
apps/jobsub/src/jobsub/java/src/org/apache/hadoop/security/JobClientTrace.aj

@@ -1,60 +0,0 @@
-// Licensed to Cloudera, Inc. under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  Cloudera, Inc. licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-package org.apache.hadoop.security;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapred.*;
-import org.apache.hadoop.security.Credentials;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-public aspect JobClientTrace {
-  private static final Log LOG = LogFactory.getLog(JobClientTracer.class);
-
-  JobClientTrace() {
-    LOG.info("Hue job submission aspect loaded.");
-  }
-
-  RunningJob around(JobConf conf):
-    call(RunningJob JobClient.submitJobInternal(JobConf)) && args(conf) {
-    RunningJob ret = proceed(conf);
-    JobClientTracer.getInstance().submittedJob(ret);
-    return ret;
-  }
-
-  /**
-   * The JobClient assumes that the user is authenticated via kerberos for the
-   * purpose of job submission, and thus that delegation tokens for the NN(s)
-   * can be retrieved. This doesn't work, however, in the case that the job is
-   * actually being submitted to the JT via an MR delegation token, because new
-   * delegation tokens can't be retrieved if one is only authenticated via
-   * delegation tokens. The work-around here is to pre-fetch NN delegation
-   * tokens from jobsubd, and pass them in. We set the
-   * <code>mapreduce.job.credentials.binary</code> here because this happens
-   * immediately before we try to fill the token cache with delegation tokens
-   * from the NN(s).
-   */
-  void around(Configuration conf, Credentials credentials):
-    call(void JobClient.readTokensFromFiles(Configuration, Credentials)) && args(conf, credentials) {
-
-    String hadoopTokenFileLocation = System.getenv("HADOOP_TOKEN_FILE_LOCATION");
-    if (hadoopTokenFileLocation != null) {
-      conf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
-      conf.setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
-    }
-    proceed(conf, credentials);
-  }
-}

+ 0 - 51
apps/jobsub/src/jobsub/java/src/org/apache/hadoop/security/JobClientTracer.java

@@ -1,51 +0,0 @@
-// Licensed to Cloudera, Inc. under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  Cloudera, Inc. licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-package org.apache.hadoop.security;
-
-import org.apache.hadoop.mapred.RunningJob;
-import java.io.PrintStream;
-import java.io.IOException;
-
-/**
- * Records JobIDs into a file specified by $DEFAULT_JOB_LOG.
- */
-public class JobClientTracer {
-  private static JobClientTracer instance = new JobClientTracer();
-  private PrintStream reportStream;
-
-  public static final String DEFAULT_JOB_LOG = "reported-jobs.txt";
-
-  public synchronized static JobClientTracer getInstance() {
-    return instance;
-  }
-
-  private JobClientTracer() {
-    try {
-      String filename = System.getenv("HUE_JOBTRACE_LOG");
-      if (filename == null) {
-        filename = DEFAULT_JOB_LOG;
-      }
-      reportStream = new PrintStream(filename);
-    } catch (IOException ioe) {
-      throw new RuntimeException(ioe);
-    }
-  }
-
-  public void submittedJob(RunningJob job) {
-    reportStream.println(job.getJobID());
-    reportStream.flush();
-  }
-}

+ 0 - 97
apps/jobsub/src/jobsub/java/src/org/apache/hadoop/security/UgiFixer.aj

@@ -1,97 +0,0 @@
-// Licensed to Cloudera, Inc. under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  Cloudera, Inc. licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-package org.apache.hadoop.security;
-
-import java.security.Principal;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.security.auth.Subject;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
-import javax.security.auth.login.LoginException;
-import javax.security.auth.login.Configuration;
-import javax.security.auth.spi.LoginModule;
-
-import org.apache.hadoop.security.User;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * Overrides the HadoopConfiguration.getAppConfigurationEntry to return the
- * user that Jobsub is running this job as.
- */
-public aspect UgiFixer {
-  private static final Log LOG = LogFactory.getLog(UgiFixer.class);
-
-  static {
-    LOG.info("Hue UGI fixer aspect loaded.");
-  }
-
-  private static final String USER_VAR = "HUE_JOBSUB_USER";
-  private static final String DEFAULT_USER = "default_jobsub_user";
-
-  private static final AppConfigurationEntry JOBSUB_LOGIN =
-    new AppConfigurationEntry(JobsubLoginModule.class.getName(),
-                              LoginModuleControlFlag.REQUIRED,
-                              new HashMap<String,String>());
-
-  private static final AppConfigurationEntry[] JOBSUB_CONF =
-    new AppConfigurationEntry[]{ JOBSUB_LOGIN };
-
-  public static class JobsubLoginModule implements LoginModule {
-    private Subject subject;
-
-    public boolean abort() throws LoginException {
-      return true;
-    }
-
-    public boolean commit() throws LoginException {
-      if (!subject.getPrincipals(User.class).isEmpty()) {
-        return true;
-      }
-      String user = System.getenv(USER_VAR);
-      subject.getPrincipals().add(new User(user == null ? DEFAULT_USER : user));
-      return true;
-    }
-
-    public void initialize(Subject subject, CallbackHandler callbackHandler,
-                           Map<String, ?> sharedState, Map<String, ?> options) {
-      this.subject = subject;
-    }
-
-    public boolean login() throws LoginException {
-      return true;
-    }
-
-    public boolean logout() throws LoginException {
-      return true;
-    }
-  }
-
-  pointcut getAppConfigurationEntry(String appName):
-    execution(AppConfigurationEntry[] Configuration.getAppConfigurationEntry(String)) && args(appName) && within(UserGroupInformation);
-
-  AppConfigurationEntry[] around(String appName):
-    getAppConfigurationEntry(appName) {
-      if (!appName.equals("hadoop-simple")) {
-        LOG.warn("getAppConfigurationEntry() called for auth method other than simple, returning JOBSUB_CONF anyway: " + appName);
-      }
-      return JOBSUB_CONF;
-    }
-}

+ 0 - 122
apps/jobsub/src/jobsub/jobsubd.thrift

@@ -1,122 +0,0 @@
-#!/usr/bin/env thrift -r --gen py:new_style -o ../../
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Structs for interacting with the job submission daemon.
-
-namespace py jobsubd
-
-struct SubmissionHandle {
-  /**
-  Unique id for this instance of job submission.
-  In practice this is a primary key in the
-  database.  This struct exists so that the
-  handle can evolve (for example, to multiple
-  submission servers).
-  */
-  1: i64 id
-}
-
-// States of a given job.
-enum State {
-  SUBMITTED = 1,
-  RUNNING = 2,
-  SUCCESS = 3,
-  // Indicates submission error
-  ERROR = 4,
-  // Like SUCCESS, but non-zero returncode
-  FAILURE = 5
-}
-
-// Information about a given submission
-struct JobData {
-  /** Job ID that the JobTracker has for this job */
-  1: list<string> hadoop_job_ids,
-  /** These are "tails" of the stdout/stderr from the Java process doing the 
-      job submission. */
-  2: string stdout_tail,
-  3: string stderr_tail,
-  4: State state
-}
-
-exception SubmissionError {
-  1: string message,
-  2: string detail
-}
-
-/**
-jar files that are included in the Hadoop distribution,
-and therefore need not to be copied.
-*/
-enum PreFabLocalizedFiles {
-  STREAMING = 1
-}
-
-/** File to be copied in a LocalizeFilesStep */
-struct LocalizedFile {
-  /** Name of file in current directory.
-      Framework has the right to make this a symlink. */
-  1: string target_name,
-  /** Exactly one of the following may be set */
-  2: string path_on_hdfs,
-  3: PreFabLocalizedFiles pre_fab_localized_file
-}
-
-/** Places files into the working directory of "jobsub plan execution". */
-struct LocalizeFilesStep {
-  1: list<LocalizedFile> localize_files;
-}
-
-/** 
- * Runs bin/hadoop, with enough environment to point at the
- * configured cluster, and to assume the correct user.
- */
-struct BinHadoopStep {
-  // Arguments to pass to "bin/hadoop"
-  1: list<string> arguments
-}
-
-/** 
-  A union of all possible steps.
-
-  Note: THRIFT-409 (committed Sep 1 09) added support for unions
-  to Thrift.  After a thrift release, we could switch to that syntax.
-  */
-struct SubmissionPlanStep {
-  /** Only one of these fields may be specified! */
-  1: LocalizeFilesStep localize_files_step,
-  2: BinHadoopStep bin_hadoop_step,
-}
-
-/** Plan to be executed by jobsub. */
-struct SubmissionPlan {
-  # Name is useful for debugging
-  1: string name, 
-  2: string user,
-  # Note that the first group herein is the user's "primary" group.
-  3: list<string> groups,
-  /** Steps to execute, in order. */
-  4: list<SubmissionPlanStep> steps,
-  /** Directory name in HDFS where stdout and stderr will be put */
-  5: string save_output
-}
-  
-service JobSubmissionService {
-  SubmissionHandle submit(1: SubmissionPlan plan) throws (1:SubmissionError error),
-  JobData get_job_data(1: SubmissionHandle handle) throws (1:SubmissionError error)
-}

+ 0 - 40
apps/jobsub/src/jobsub/management/commands/jobsubd.py

@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Starts the jobsubd server.
-"""
-
-import logging
-import sys
-
-from django.core.management.base import NoArgsCommand
-
-from jobsub import server
-
-LOG = logging.getLogger(__name__)
-
-class Command(NoArgsCommand):
-  """Starts jobsubd daemon."""
-  def handle_noargs(self, **options):
-    try:
-      server.main()
-    except Exception, ex:
-      LOG.exception(ex)
-      LOG.fatal('Jobsubd encountered uncaught exception. .')
-      sys.exit(1)
-    except KeyboardInterrupt, kbe:
-      sys.exit(2)