
HUE-1077 [jobsub] Add some examples

Add all examples from oozie that have a single action.
Abraham Elmahrek, 12 years ago
Commit 9033de2e15
31 changed files with 1,578 additions and 298 deletions
  1. +1 -1      apps/jobbrowser/src/jobbrowser/tests.py
  2. +0 -0      apps/oozie/examples/coordinators/range/empty
  3. +0 -0      apps/oozie/examples/managed/distcp/empty
  4. +0 -0      apps/oozie/examples/managed/email/empty
  5. +0 -0      apps/oozie/examples/managed/fs/empty
  6. +0 -0      apps/oozie/examples/managed/generic/empty
  7. +0 -0      apps/oozie/examples/managed/hive/hive-site.xml
  8. +0 -0      apps/oozie/examples/managed/hive/hive.sql
  9. +0 -0      apps/oozie/examples/managed/pig/aggregate.pig
 10. +0 -0      apps/oozie/examples/managed/shell/hello.py
 11. +0 -0      apps/oozie/examples/managed/sleep/empty
 12. +0 -0      apps/oozie/examples/managed/sleepfork/empty
 13. +0 -0      apps/oozie/examples/managed/sqoop/TT.java
 14. +0 -0      apps/oozie/examples/managed/sqoop/db.hsqldb.properties
 15. +0 -0      apps/oozie/examples/managed/sqoop/db.hsqldb.script
 16. +0 -0      apps/oozie/examples/managed/ssh/empty
 17. +0 -0      apps/oozie/examples/managed/terasort/empty
 18. +0 -0      apps/oozie/examples/unmanaged/distcp/empty
 19. +0 -0      apps/oozie/examples/unmanaged/email/empty
 20. +0 -0      apps/oozie/examples/unmanaged/fs/empty
 21. +49 -0     apps/oozie/examples/unmanaged/hive/hive-site.xml
 22. +1 -0      apps/oozie/examples/unmanaged/hive/hive.sql
 23. +8 -0      apps/oozie/examples/unmanaged/pig/aggregate.pig
 24. +6 -0      apps/oozie/examples/unmanaged/shell/hello.py
 25. +0 -0      apps/oozie/examples/unmanaged/sleep/empty
 26. +224 -0    apps/oozie/examples/unmanaged/sqoop/TT.java
 27. +17 -0     apps/oozie/examples/unmanaged/sqoop/db.hsqldb.properties
 28. +9 -0      apps/oozie/examples/unmanaged/sqoop/db.hsqldb.script
 29. +0 -0      apps/oozie/examples/unmanaged/ssh/empty
 30. +1254 -288 apps/oozie/src/oozie/fixtures/initial_oozie_examples.json
 31. +9 -9      apps/oozie/src/oozie/tests.py

+ 1 - 1
apps/jobbrowser/src/jobbrowser/tests.py

@@ -187,7 +187,7 @@ class TestJobBrowserWithHadoop(unittest.TestCase, OozieServerProvider):
 
     # Submit the job
     design_dict = json.loads(response.content)
-    design_id = int(design_dict['id'][0])
+    design_id = int(design_dict['id'])
     response = self.client.post(reverse('oozie:submit_workflow',
                                 args=[design_id]),
                                 data={u'form-MAX_NUM_FORMS': [u''],

+ 0 - 0
apps/oozie/examples/distcp/empty → apps/oozie/examples/coordinators/range/empty


+ 0 - 0
apps/oozie/examples/email/empty → apps/oozie/examples/managed/distcp/empty


+ 0 - 0
apps/oozie/examples/fs/empty → apps/oozie/examples/managed/email/empty


+ 0 - 0
apps/oozie/examples/generic/empty → apps/oozie/examples/managed/fs/empty


+ 0 - 0
apps/oozie/examples/range/empty → apps/oozie/examples/managed/generic/empty


+ 0 - 0
apps/oozie/examples/hive/hive-site.xml → apps/oozie/examples/managed/hive/hive-site.xml


+ 0 - 0
apps/oozie/examples/hive/hive.sql → apps/oozie/examples/managed/hive/hive.sql


+ 0 - 0
apps/oozie/examples/pig/aggregate.pig → apps/oozie/examples/managed/pig/aggregate.pig


+ 0 - 0
apps/oozie/examples/shell/hello.py → apps/oozie/examples/managed/shell/hello.py


+ 0 - 0
apps/oozie/examples/sleep/empty → apps/oozie/examples/managed/sleep/empty


+ 0 - 0
apps/oozie/examples/sleepfork/empty → apps/oozie/examples/managed/sleepfork/empty


+ 0 - 0
apps/oozie/examples/sqoop/TT.java → apps/oozie/examples/managed/sqoop/TT.java


+ 0 - 0
apps/oozie/examples/sqoop/db.hsqldb.properties → apps/oozie/examples/managed/sqoop/db.hsqldb.properties


+ 0 - 0
apps/oozie/examples/sqoop/db.hsqldb.script → apps/oozie/examples/managed/sqoop/db.hsqldb.script


+ 0 - 0
apps/oozie/examples/ssh/empty → apps/oozie/examples/managed/ssh/empty


+ 0 - 0
apps/oozie/examples/terasort/empty → apps/oozie/examples/managed/terasort/empty


+ 0 - 0
apps/oozie/examples/unmanaged/distcp/empty


+ 0 - 0
apps/oozie/examples/unmanaged/email/empty


+ 0 - 0
apps/oozie/examples/unmanaged/fs/empty


+ 49 - 0
apps/oozie/examples/unmanaged/hive/hive-site.xml

@@ -0,0 +1,49 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+<!-- Hive Configuration can either be stored in this file or in the hadoop configuration files  -->
+<!-- that are implied by Hadoop setup variables.                                                -->
+<!-- Aside from Hadoop setup variables - this file is provided as a convenience so that Hive    -->
+<!-- users do not have to edit hadoop configuration files (that may be managed as a centralized -->
+<!-- resource).                                                                                 -->
+
+
+<property>
+  <name>hive.server2.authentication</name>
+  <value>NOSASL</value>
+</property>
+
+
+<!-- Hive Execution Parameters -->
+
+<property>
+  <name>javax.jdo.option.ConnectionURL</name>
+  <value>jdbc:derby:;databaseName=/var/lib/hive/metastore/metastore_db;create=true</value>
+  <description>JDBC connect string for a JDBC metastore</description>
+</property>
+
+<property>
+  <name>javax.jdo.option.ConnectionDriverName</name>
+  <value>org.apache.derby.jdbc.EmbeddedDriver</value>
+  <description>Driver class name for a JDBC metastore</description>
+</property>
+
+</configuration>

+ 1 - 0
apps/oozie/examples/unmanaged/hive/hive.sql

@@ -0,0 +1 @@
+show tables;

+ 8 - 0
apps/oozie/examples/unmanaged/pig/aggregate.pig

@@ -0,0 +1,8 @@
+
+A = LOAD '$INPUT' AS (word:CHARARRAY, count:INT);
+
+B = FOREACH A GENERATE count, word;
+C = ORDER B BY count DESC;
+
+STORE C INTO '$OUTPUT';
+

+ 6 - 0
apps/oozie/examples/unmanaged/shell/hello.py

@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+
+import sys
+
+print 'Hello ' + ', '.join(sys.argv[1:])
+
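
The shell example above targets Python 2 (bare print statement), matching the committed file. If the same script ever needs to run under Python 3, a minimal equivalent sketch, not part of this commit, would be:

#!/usr/bin/env python
# Python 3 compatible variant of the shell example above (illustrative sketch only)
import sys

print('Hello ' + ', '.join(sys.argv[1:]))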

+ 0 - 0
apps/oozie/examples/unmanaged/sleep/empty


+ 224 - 0
apps/oozie/examples/unmanaged/sqoop/TT.java

@@ -0,0 +1,224 @@
+// ORM class for TT
+// WARNING: This class is AUTO-GENERATED. Modify at your own risk.
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapred.lib.db.DBWritable;
+import com.cloudera.sqoop.lib.JdbcWritableBridge;
+import com.cloudera.sqoop.lib.DelimiterSet;
+import com.cloudera.sqoop.lib.FieldFormatter;
+import com.cloudera.sqoop.lib.RecordParser;
+import com.cloudera.sqoop.lib.BooleanParser;
+import com.cloudera.sqoop.lib.BlobRef;
+import com.cloudera.sqoop.lib.ClobRef;
+import com.cloudera.sqoop.lib.LargeObjectLoader;
+import com.cloudera.sqoop.lib.SqoopRecord;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.CharBuffer;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class TT extends SqoopRecord  implements DBWritable, Writable {
+  private final int PROTOCOL_VERSION = 3;
+  public int getClassFormatVersion() { return PROTOCOL_VERSION; }
+  protected ResultSet __cur_result_set;
+  private Integer I;
+  public Integer get_I() {
+    return I;
+  }
+  public void set_I(Integer I) {
+    this.I = I;
+  }
+  public TT with_I(Integer I) {
+    this.I = I;
+    return this;
+  }
+  private String S;
+  public String get_S() {
+    return S;
+  }
+  public void set_S(String S) {
+    this.S = S;
+  }
+  public TT with_S(String S) {
+    this.S = S;
+    return this;
+  }
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (!(o instanceof TT)) {
+      return false;
+    }
+    TT that = (TT) o;
+    boolean equal = true;
+    equal = equal && (this.I == null ? that.I == null : this.I.equals(that.I));
+    equal = equal && (this.S == null ? that.S == null : this.S.equals(that.S));
+    return equal;
+  }
+  public void readFields(ResultSet __dbResults) throws SQLException {
+    this.__cur_result_set = __dbResults;
+    this.I = JdbcWritableBridge.readInteger(1, __dbResults);
+    this.S = JdbcWritableBridge.readString(2, __dbResults);
+  }
+  public void loadLargeObjects(LargeObjectLoader __loader)
+      throws SQLException, IOException, InterruptedException {
+  }
+  public void write(PreparedStatement __dbStmt) throws SQLException {
+    write(__dbStmt, 0);
+  }
+
+  public int write(PreparedStatement __dbStmt, int __off) throws SQLException {
+    JdbcWritableBridge.writeInteger(I, 1 + __off, 4, __dbStmt);
+    JdbcWritableBridge.writeString(S, 2 + __off, 12, __dbStmt);
+    return 2;
+  }
+  public void readFields(DataInput __dataIn) throws IOException {
+    if (__dataIn.readBoolean()) {
+        this.I = null;
+    } else {
+    this.I = Integer.valueOf(__dataIn.readInt());
+    }
+    if (__dataIn.readBoolean()) {
+        this.S = null;
+    } else {
+    this.S = Text.readString(__dataIn);
+    }
+  }
+  public void write(DataOutput __dataOut) throws IOException {
+    if (null == this.I) {
+        __dataOut.writeBoolean(true);
+    } else {
+        __dataOut.writeBoolean(false);
+    __dataOut.writeInt(this.I);
+    }
+    if (null == this.S) {
+        __dataOut.writeBoolean(true);
+    } else {
+        __dataOut.writeBoolean(false);
+    Text.writeString(__dataOut, S);
+    }
+  }
+  private final DelimiterSet __outputDelimiters = new DelimiterSet((char) 44, (char) 10, (char) 0, (char) 0, false);
+  public String toString() {
+    return toString(__outputDelimiters, true);
+  }
+  public String toString(DelimiterSet delimiters) {
+    return toString(delimiters, true);
+  }
+  public String toString(boolean useRecordDelim) {
+    return toString(__outputDelimiters, useRecordDelim);
+  }
+  public String toString(DelimiterSet delimiters, boolean useRecordDelim) {
+    StringBuilder __sb = new StringBuilder();
+    char fieldDelim = delimiters.getFieldsTerminatedBy();
+    __sb.append(FieldFormatter.escapeAndEnclose(I==null?"null":"" + I, delimiters));
+    __sb.append(fieldDelim);
+    __sb.append(FieldFormatter.escapeAndEnclose(S==null?"null":S, delimiters));
+    if (useRecordDelim) {
+      __sb.append(delimiters.getLinesTerminatedBy());
+    }
+    return __sb.toString();
+  }
+  private final DelimiterSet __inputDelimiters = new DelimiterSet((char) 44, (char) 10, (char) 0, (char) 0, false);
+  private RecordParser __parser;
+  public void parse(Text __record) throws RecordParser.ParseError {
+    if (null == this.__parser) {
+      this.__parser = new RecordParser(__inputDelimiters);
+    }
+    List<String> __fields = this.__parser.parseRecord(__record);
+    __loadFromFields(__fields);
+  }
+
+  public void parse(CharSequence __record) throws RecordParser.ParseError {
+    if (null == this.__parser) {
+      this.__parser = new RecordParser(__inputDelimiters);
+    }
+    List<String> __fields = this.__parser.parseRecord(__record);
+    __loadFromFields(__fields);
+  }
+
+  public void parse(byte [] __record) throws RecordParser.ParseError {
+    if (null == this.__parser) {
+      this.__parser = new RecordParser(__inputDelimiters);
+    }
+    List<String> __fields = this.__parser.parseRecord(__record);
+    __loadFromFields(__fields);
+  }
+
+  public void parse(char [] __record) throws RecordParser.ParseError {
+    if (null == this.__parser) {
+      this.__parser = new RecordParser(__inputDelimiters);
+    }
+    List<String> __fields = this.__parser.parseRecord(__record);
+    __loadFromFields(__fields);
+  }
+
+  public void parse(ByteBuffer __record) throws RecordParser.ParseError {
+    if (null == this.__parser) {
+      this.__parser = new RecordParser(__inputDelimiters);
+    }
+    List<String> __fields = this.__parser.parseRecord(__record);
+    __loadFromFields(__fields);
+  }
+
+  public void parse(CharBuffer __record) throws RecordParser.ParseError {
+    if (null == this.__parser) {
+      this.__parser = new RecordParser(__inputDelimiters);
+    }
+    List<String> __fields = this.__parser.parseRecord(__record);
+    __loadFromFields(__fields);
+  }
+
+  private void __loadFromFields(List<String> fields) {
+    Iterator<String> __it = fields.listIterator();
+    String __cur_str;
+    __cur_str = __it.next();
+    if (__cur_str.equals("null") || __cur_str.length() == 0) { this.I = null; } else {
+      this.I = Integer.valueOf(__cur_str);
+    }
+
+    __cur_str = __it.next();
+    if (__cur_str.equals("null")) { this.S = null; } else {
+      this.S = __cur_str;
+    }
+
+  }
+
+  public Object clone() throws CloneNotSupportedException {
+    TT o = (TT) super.clone();
+    return o;
+  }
+
+  public Map<String, Object> getFieldMap() {
+    Map<String, Object> __sqoop$field_map = new TreeMap<String, Object>();
+    __sqoop$field_map.put("I", this.I);
+    __sqoop$field_map.put("S", this.S);
+    return __sqoop$field_map;
+  }
+
+  public void setField(String __fieldName, Object __fieldVal) {
+    if ("I".equals(__fieldName)) {
+      this.I = (Integer) __fieldVal;
+    }
+    else    if ("S".equals(__fieldName)) {
+      this.S = (String) __fieldVal;
+    }
+    else {
+      throw new RuntimeException("No such field: " + __fieldName);
+    }
+  }
+}

+ 17 - 0
apps/oozie/examples/unmanaged/sqoop/db.hsqldb.properties

@@ -0,0 +1,17 @@
+#HSQL Database Engine 1.8.0.10
+#Wed Sep 19 17:26:48 PDT 2012
+hsqldb.script_format=0
+runtime.gc_interval=0
+sql.enforce_strict_size=false
+hsqldb.cache_size_scale=8
+readonly=false
+hsqldb.nio_data_file=true
+hsqldb.cache_scale=14
+version=1.8.0
+hsqldb.default_table_type=memory
+hsqldb.cache_file_scale=1
+hsqldb.log_size=200
+modified=yes
+hsqldb.cache_version=1.7.0
+hsqldb.original_version=1.8.0
+hsqldb.compatible_version=1.8.0

+ 9 - 0
apps/oozie/examples/unmanaged/sqoop/db.hsqldb.script

@@ -0,0 +1,9 @@
+CREATE SCHEMA PUBLIC AUTHORIZATION DBA
+CREATE MEMORY TABLE TT(I INTEGER NOT NULL PRIMARY KEY,S VARCHAR(256))
+CREATE USER SA PASSWORD ""
+GRANT DBA TO SA
+SET WRITE_DELAY 10
+SET SCHEMA PUBLIC
+INSERT INTO TT VALUES(1,'a')
+INSERT INTO TT VALUES(2,'a')
+INSERT INTO TT VALUES(3,'a')

+ 0 - 0
apps/oozie/examples/unmanaged/ssh/empty


File diff suppressed because it is too large
+ 1254 - 288
apps/oozie/src/oozie/fixtures/initial_oozie_examples.json


+ 9 - 9
apps/oozie/src/oozie/tests.py

@@ -327,7 +327,7 @@ class TestAPI(OozieMockBase):
     OozieMockBase.setUp(self)
 
     # When updating wf, update wf_json as well!
-    self.wf = Workflow.objects.get(name='wf-name-1')
+    self.wf = Workflow.objects.get(name='wf-name-1', managed=True)
 
   def test_workflow_save(self):
     self.setup_simple_workflow()
@@ -492,7 +492,7 @@ class TestAPIWithOozie(OozieBase):
     OozieBase.setUp(self)
 
     # When updating wf, update wf_json as well!
-    self.wf = Workflow.objects.get(name='MapReduce').clone(self.cluster.fs, self.user)
+    self.wf = Workflow.objects.get(name='MapReduce', managed=True).clone(self.cluster.fs, self.user)
 
   def test_import_jobsub_actions(self):
     # Setup jobsub examples
@@ -523,7 +523,7 @@ class TestApiPermissionsWithOozie(OozieBase):
     OozieBase.setUp(self)
 
     # When updating wf, update wf_json as well!
-    self.wf = Workflow.objects.get(name='MapReduce').clone(self.cluster.fs, self.user)
+    self.wf = Workflow.objects.get(name='MapReduce', managed=True).clone(self.cluster.fs, self.user)
 
   def test_workflow_save(self):
     # Share
@@ -1715,7 +1715,7 @@ class TestPermissions(OozieBase):
       finish()
 
     # Share it !
-    self.wf = Workflow.objects.get(name='wf-name-1')
+    self.wf = Workflow.objects.get(name='wf-name-1', managed=True)
     self.wf.is_shared = True
     self.wf.save()
     Workflow.objects.check_workspace(self.wf, self.cluster.fs)
@@ -1816,7 +1816,7 @@ class TestPermissions(OozieBase):
       finish()
 
     # Share it !
-    wf = Workflow.objects.get(id=coord.workflow.id)
+    wf = Workflow.objects.get(id=coord.workflow.id, managed=True)
     wf.is_shared = True
     wf.save()
     Workflow.objects.check_workspace(wf, self.cluster.fs)
@@ -2104,7 +2104,7 @@ class TestImportWorkflow04WithOozie(OozieBase):
 class TestOozieSubmissions(OozieBase):
 
   def test_submit_mapreduce_action(self):
-    wf = Workflow.objects.get(name='MapReduce')
+    wf = Workflow.objects.get(name='MapReduce', managed=True)
     post_data = {u'form-MAX_NUM_FORMS': [u''], u'form-INITIAL_FORMS': [u'1'],
                  u'form-0-name': [u'REDUCER_SLEEP_TIME'], u'form-0-value': [u'1'], u'form-TOTAL_FORMS': [u'1']}
 
@@ -2134,7 +2134,7 @@ class TestOozieSubmissions(OozieBase):
 
 
   def test_submit_java_action(self):
-    wf = Workflow.objects.get(name='Sequential Java')
+    wf = Workflow.objects.get(name='Sequential Java', managed=True)
 
     response = self.c.post(reverse('oozie:submit_workflow', args=[wf.id]),
                            data={u'form-MAX_NUM_FORMS': [u''],
@@ -2147,7 +2147,7 @@ class TestOozieSubmissions(OozieBase):
 
 
   def test_submit_distcp_action(self):
-    wf = Workflow.objects.get(name='DistCp')
+    wf = Workflow.objects.get(name='DistCp', managed=True)
 
     response = self.c.post(reverse('oozie:submit_workflow', args=[wf.id]),
                            data= {u'form-MAX_NUM_FORMS': [u''], u'form-TOTAL_FORMS': [u'3'], u'form-INITIAL_FORMS': [u'3'],
@@ -2537,7 +2537,7 @@ class TestUtils(OozieMockBase):
     OozieMockBase.setUp(self)
 
     # When updating wf, update wf_json as well!
-    self.wf = Workflow.objects.get(name='wf-name-1')
+    self.wf = Workflow.objects.get(name='wf-name-1', managed=True)
 
 
   def test_workflow_to_dict(self):
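
The recurring managed=True filter in these test changes suggests that the updated fixture ships both managed and unmanaged copies of each example workflow, so a lookup by name alone is no longer unique. A minimal Django ORM sketch of that failure mode, assuming the Workflow model lives in oozie.models and exposes the managed boolean field used above:

from oozie.models import Workflow  # assumed import path for Hue's Oozie app

# With a managed and an unmanaged example sharing the same name, the bare lookup
# raises Workflow.MultipleObjectsReturned; the extra filter keeps it unambiguous.
try:
    wf = Workflow.objects.get(name='MapReduce')
except Workflow.MultipleObjectsReturned:
    wf = Workflow.objects.get(name='MapReduce', managed=True)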

Some files were not shown because too many files changed in this diff