
HUE-898 [oozie] Import workflow definition

- Use xslt to transform XML to django serialized object
- Resolve links manually
abec 13 years ago
parent
commit ffd5776
56 files changed with 2114 additions and 16 deletions
  1. apps/oozie/src/oozie/conf.py (+6 -0)
  2. apps/oozie/src/oozie/forms.py (+6 -0)
  3. apps/oozie/src/oozie/import_workflow.py (+282 -0)
  4. apps/oozie/src/oozie/models.py (+11 -0)
  5. apps/oozie/src/oozie/templates/editor/edit_workflow.mako (+17 -14)
  6. apps/oozie/src/oozie/templates/editor/import_workflow.mako (+85 -0)
  7. apps/oozie/src/oozie/templates/editor/list_workflows.mako (+1 -0)
  8. apps/oozie/src/oozie/test_data/0.4/test-basic.xml (+4 -0)
  9. apps/oozie/src/oozie/test_data/0.4/test-decision.xml (+292 -0)
  10. apps/oozie/src/oozie/test_data/0.4/test-distcp.0.1.xml (+20 -0)
  11. apps/oozie/src/oozie/test_data/0.4/test-forks.xml (+287 -0)
  12. apps/oozie/src/oozie/test_data/0.4/test-java.xml (+29 -0)
  13. apps/oozie/src/oozie/test_data/0.4/test-mapreduce.xml (+61 -0)
  14. apps/oozie/src/oozie/test_data/0.4/test-pig.xml (+20 -0)
  15. apps/oozie/src/oozie/test_data/0.4/test-sqoop.0.2.xml (+21 -0)
  16. apps/oozie/src/oozie/tests.py (+120 -0)
  17. apps/oozie/src/oozie/urls.py (+1 -0)
  18. apps/oozie/src/oozie/views/editor.py (+36 -2)
  19. apps/oozie/src/oozie/xslt/0.4/action.xslt (+35 -0)
  20. apps/oozie/src/oozie/xslt/0.4/control.xslt (+30 -0)
  21. apps/oozie/src/oozie/xslt/0.4/extensions/distcp.0.1.xslt (+37 -0)
  22. apps/oozie/src/oozie/xslt/0.4/extensions/hive.0.1.xslt (+30 -0)
  23. apps/oozie/src/oozie/xslt/0.4/extensions/hive.0.2.xslt (+30 -0)
  24. apps/oozie/src/oozie/xslt/0.4/extensions/shell.0.1.xslt (+32 -0)
  25. apps/oozie/src/oozie/xslt/0.4/extensions/sqoop.0.1.xslt (+30 -0)
  26. apps/oozie/src/oozie/xslt/0.4/extensions/sqoop.0.2.xslt (+31 -0)
  27. apps/oozie/src/oozie/xslt/0.4/extensions/ssh.0.1.xslt (+26 -0)
  28. apps/oozie/src/oozie/xslt/0.4/nodes/decision.xslt (+13 -0)
  29. apps/oozie/src/oozie/xslt/0.4/nodes/end.xslt (+13 -0)
  30. apps/oozie/src/oozie/xslt/0.4/nodes/fields/archives.xslt (+20 -0)
  31. apps/oozie/src/oozie/xslt/0.4/nodes/fields/args.xslt (+20 -0)
  32. apps/oozie/src/oozie/xslt/0.4/nodes/fields/arguments.xslt (+24 -0)
  33. apps/oozie/src/oozie/xslt/0.4/nodes/fields/capture_output.xslt (+21 -0)
  34. apps/oozie/src/oozie/xslt/0.4/nodes/fields/command.xslt (+14 -0)
  35. apps/oozie/src/oozie/xslt/0.4/nodes/fields/files.xslt (+20 -0)
  36. apps/oozie/src/oozie/xslt/0.4/nodes/fields/host.xslt (+14 -0)
  37. apps/oozie/src/oozie/xslt/0.4/nodes/fields/jar_path.xslt (+14 -0)
  38. apps/oozie/src/oozie/xslt/0.4/nodes/fields/java_opts.xslt (+13 -0)
  39. apps/oozie/src/oozie/xslt/0.4/nodes/fields/job_properties.xslt (+24 -0)
  40. apps/oozie/src/oozie/xslt/0.4/nodes/fields/job_xml.xslt (+13 -0)
  41. apps/oozie/src/oozie/xslt/0.4/nodes/fields/main_class.xslt (+14 -0)
  42. apps/oozie/src/oozie/xslt/0.4/nodes/fields/mapper.xslt (+14 -0)
  43. apps/oozie/src/oozie/xslt/0.4/nodes/fields/params.xslt (+24 -0)
  44. apps/oozie/src/oozie/xslt/0.4/nodes/fields/prepares.xslt (+26 -0)
  45. apps/oozie/src/oozie/xslt/0.4/nodes/fields/reducer.xslt (+14 -0)
  46. apps/oozie/src/oozie/xslt/0.4/nodes/fields/script_path.xslt (+14 -0)
  47. apps/oozie/src/oozie/xslt/0.4/nodes/fields/user.xslt (+14 -0)
  48. apps/oozie/src/oozie/xslt/0.4/nodes/fork.xslt (+13 -0)
  49. apps/oozie/src/oozie/xslt/0.4/nodes/java.xslt (+34 -0)
  50. apps/oozie/src/oozie/xslt/0.4/nodes/join.xslt (+13 -0)
  51. apps/oozie/src/oozie/xslt/0.4/nodes/kill.xslt (+13 -0)
  52. apps/oozie/src/oozie/xslt/0.4/nodes/mapreduce.xslt (+28 -0)
  53. apps/oozie/src/oozie/xslt/0.4/nodes/pig.xslt (+30 -0)
  54. apps/oozie/src/oozie/xslt/0.4/nodes/start.xslt (+14 -0)
  55. apps/oozie/src/oozie/xslt/0.4/nodes/streaming.xslt (+26 -0)
  56. apps/oozie/src/oozie/xslt/0.4/workflow.xslt (+20 -0)

+ 6 - 0
apps/oozie/src/oozie/conf.py

@@ -23,6 +23,12 @@ from desktop.lib.conf import Config, coerce_bool
 from desktop.lib import paths
 
 
+DEFINITION_XSLT_DIR = Config(
+  key="definition_xslt_dir",
+  default=os.path.join(os.path.dirname(__file__), "xslt"),
+  help=_("Location on local FS where the xslt files are stored for workflow import."),
+  private=True)
+
 LOCAL_SAMPLE_DIR = Config(
   key="local_data_dir",
   default=os.path.join(os.path.dirname(__file__), "..", "..", "examples"),
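
The new DEFINITION_XSLT_DIR setting points at the per-version XSLT tree described in import_workflow.py below. A minimal sketch of how such a path could be resolved (the '0.4' version string is just an example; the actual lookup lives in import_workflow.py):

    import os

    from oozie.conf import DEFINITION_XSLT_DIR

    def workflow_xslt_path(schema_version='0.4'):
      # e.g. <oozie app dir>/xslt/0.4/workflow.xslt
      return os.path.join(DEFINITION_XSLT_DIR.get(), str(schema_version), 'workflow.xslt')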

+ 6 - 0
apps/oozie/src/oozie/forms.py

@@ -63,6 +63,12 @@ class WorkflowForm(forms.ModelForm):
     super(WorkflowForm, self).__init__(*args, **kwargs)
 
 
+SCHEMA_VERSION_CHOICES = ['0.4']
+class ImportWorkflowForm(WorkflowForm):
+  definition = forms.CharField(widget=forms.Textarea())
+  schema_version = forms.ChoiceField(choices=[(version, version) for version in SCHEMA_VERSION_CHOICES])
+
+
 class ImportJobsubDesignForm(forms.Form):
   """Used for specifying what oozie actions to import"""
   def __init__(self, choices=[], *args, **kwargs):
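
A hedged sketch of how ImportWorkflowForm might be bound and validated on the view side (standard Django form handling; the actual view body is not part of the hunks shown on this page):

    form = ImportWorkflowForm(request.POST)
    if form.is_valid():
      definition = form.cleaned_data['definition']          # raw workflow XML pasted by the user
      schema_version = form.cleaned_data['schema_version']  # one of SCHEMA_VERSION_CHOICES, e.g. '0.4'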

+ 282 - 0
apps/oozie/src/oozie/import_workflow.py

@@ -0,0 +1,282 @@
+#!/usr/bin/env python
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Import an external workflow by providing an XML definition.
+The workflow definition is imported via the method 'import_workflow'.
+The XML is first transformed into a Django-serialized string that can be deserialized and interpreted.
+The interpreted objects are then assigned to the workflow, stripped of any useless IDs and saved.
+Then the links are interpreted from the original XML definition.
+First the basic links are interpreted for basic hierarchy traversal.
+Then the related links are inferred, including Decision node ends.
+See oozie.models.Decision for more information on decision ends.
+
+The XSLTs are partitioned by version.
+For every new workflow DTD version a new directory should be created.
+IE: uri:oozie:workflow:0.4 => 0.4 directory in xslt dir.
+Action extensions are also versioned.
+Every action extension will have its own version via /xslt/<workflow version>/extensions/<name of extension>.<version>.xslt
+"""
+
+try:
+  import json
+except ImportError:
+  import simplejson as json
+from collections import deque
+import logging
+import re
+from lxml import etree
+
+from django.core import serializers
+
+from conf import DEFINITION_XSLT_DIR
+from models import Node, Link, Start, End, Decision, Fork, Join
+
+LOG = logging.getLogger(__name__)
+
+LINKS = ('ok', 'error', 'path')
+def _save_links(workflow, root):
+  """
+  Iterates over all links in the passed XML doc and creates links.
+  First non-META links are resolved and created, then META links.
+  Link name is chosen with the following logic:
+    If node is start, then use 'to'.
+    Else If node is Join, then use 'to'.
+    Else If node is Decision, then
+      If tag is 'default', then use 'default'
+      Else use 'start'
+    Else
+      If tag is 'path', use 'start'
+      Else use tag as name ('ok' or 'error')
+
+  This strategy has the following resolution:
+    - Fork and Decision nodes have Links named 'start'.
+    - Decision nodes have a 'default' link.
+    - Decision nodes may have a 'related' link that is their end.
+    - Fork nodes always have a 'related' link that is their end join node.
+    - Start and Join nodes have links named 'to'.
+    - All action nodes have 'ok' and 'error' links.
+
+  Note: The nodes that these links point to should exist already.
+  Note: Nodes are looked up by workflow and name.
+  """
+  # Iterate over nodes
+  for node in root:
+    # Iterate over node members
+    # Join nodes have attributes which point to the next node
+    # Start node has attribute which points to first node
+    parent = Node.objects.get(workflow=workflow, name=node.attrib.get('name', node.tag)).get_full_node()
+
+    if isinstance(parent, Start):
+      workflow.start = parent
+      to = node.attrib['to']
+      child = Node.objects.get(workflow=workflow, name=to)
+      obj = Link.objects.create(name='to', parent=parent, child=child)
+      obj.save()
+
+    elif isinstance(parent, Join):
+      to = node.attrib['to']
+      child = Node.objects.get(workflow=workflow, name=to)
+      obj = Link.objects.create(name='to', parent=parent, child=child)
+      obj.save()
+
+    elif isinstance(parent, Decision):
+      for switch in node:
+        for case in switch:
+          to = case.attrib['to']
+          child = Node.objects.get(workflow=workflow, name=to)
+          
+          if case.tag == 'default':
+            name = 'default'
+            obj = Link.objects.create(name=name, parent=parent, child=child)
+
+          else:
+            name = 'start'
+            comment = case.text.strip()
+            obj = Link.objects.create(name=name, parent=parent, child=child, comment=comment)
+          
+          obj.save()
+
+    else:
+      for el in node:
+        # Links
+        if el.tag in LINKS:
+          name = el.tag
+          if el.tag == 'path':
+            to = el.attrib['start']
+            name = 'start'
+          else:
+            to = el.attrib['to']
+
+          child = Node.objects.get(workflow=workflow, name=to)
+          obj = Link.objects.create(name=name, parent=parent, child=child)
+          obj.save()
+
+  workflow.end = End.objects.get(workflow=workflow).get_full_node()
+  workflow.save()
+
+  _resolve_fork_relationships(workflow)
+  _resolve_decision_relationships(workflow)
+
+
+def _resolve_fork_relationships(workflow):
+  """
+  Requires proper workflow structure.
+  Fork must come before a join.
+  """
+  def helper(workflow, node, last_fork):
+    if isinstance(node, Fork):
+      join = None
+      children = node.get_children()
+      for child in children:
+        join = helper(workflow, child.get_full_node(), node) or join
+      link = Link(name='related', parent=node, child=join)
+      link.save()
+
+      node = join
+
+    elif isinstance(node, Join):
+      return node
+
+    join = None
+    children = node.get_children()
+    for child in children:
+      join = helper(workflow, child.get_full_node(), last_fork) or join
+    return join
+
+  helper(workflow, workflow.start.get_full_node(), None)
+
+
+def _resolve_decision_relationships(workflow):
+  """
+  Requires proper workflow structure.
+  A decision must come before any of its ends.
+  Ends for decisions should be at the highest decision.
+  IE:      D
+         D   N
+       N   N
+           N
+  The decision at the top should have the end, not the nested decision.
+
+  Performs a breadth first search to understand branching.
+  Call helper for every new decision found.
+  Skip forks because decisions currently cannot live in forks.
+  """
+  def find_decision(node):
+    if isinstance(node, Fork):
+      node = node.get_child_join().get_full_node()
+
+    decision = None
+    children = node.get_children()
+    for child in children:
+      child = child.get_full_node()
+      if isinstance(child, Decision):
+        return child
+      decision = find_decision(child) or decision
+    return decision
+
+  def helper(decision):
+    visit = deque(decision.get_children())
+    branch_count = len(visit)
+
+    # Find end
+    while visit:
+      node = visit.popleft()
+      parents = node.get_parents()
+
+      # An end found...
+      # IF it covers all branches, then it is a true end.
+      # ELSE it is a false end and belongs to a higher decision.
+      if len(parents) > 1:
+        if len(parents) == branch_count:
+          link = Link(name='related', parent=decision, child=node)
+          link.save()
+
+        else:
+          return node, branch_count
+
+      elif isinstance(node, Decision):
+        end, inner_branch_count = helper(node)
+        branch_count = branch_count + inner_branch_count - 1
+
+        if len(end.get_parents()) == branch_count:
+          link = Link(name='related', parent=decision, child=end)
+          link.save()
+        else:
+          return node, branch_count
+
+      visit.extend(node.get_children())
+
+  decision = find_decision(workflow.start.get_full_node())
+  if decision is not None:
+    helper(decision)
+
+
+def _save_nodes(workflow, root):
+  # Deserialize
+  objs = serializers.deserialize('xml', etree.tostring(root))
+
+  # First pass is a list of nodes and their types respectively.
+  # Must link up nodes with their respective full nodes.
+  node = None
+  for obj in objs:
+    obj.object.workflow = workflow
+    if type(obj.object) is Node:
+      node = obj.object
+    else:
+      node.node_type = obj.object.node_type
+      full_node = obj.object
+      for k, v in vars(node).items():
+        if not k.startswith('_') and k not in ('node_type','workflow','node_ptr_id'):
+          setattr(full_node, k, v)
+      full_node.workflow = workflow
+      full_node.node_type = type(full_node).node_type
+      full_node.node_ptr_id = None
+      full_node.id = None
+
+      if full_node.node_type == 'start':
+        full_node.name = 'start'
+
+      full_node.save()
+
+
+def import_workflow(workflow, workflow_definition, schema_version=0.4):
+  xslt_definition_fh = open("%(xslt_dir)s/%(schema_version)s/workflow.xslt" % {
+    'xslt_dir': DEFINITION_XSLT_DIR.get(),
+    'schema_version': schema_version
+  })
+  
+  # Remove namespace from definition
+  workflow_definition = re.sub(r'\s*xmlns=".*?"', '', workflow_definition, count=1)
+
+  # Parse Workflow Definition
+  xml = etree.fromstring(workflow_definition)
+  
+  # Get XSLT
+  xslt = etree.parse(xslt_definition_fh)
+  xslt_definition_fh.close()
+  transform = etree.XSLT(xslt)
+
+  # Transform XML using XSLT
+  root = transform(xml)
+
+  # Resolve workflow dependencies and node types and link dependencies
+  _save_nodes(workflow, root)
+  _save_links(workflow, xml)
+
+  # Update schema_version
+  workflow.schema_version = "uri:oozie:workflow:%s" % schema_version
+  workflow.save()
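
Taken together, a usage sketch of import_workflow that mirrors the tests added further down (the test-data path comes from this commit; 'user' is assumed to be an existing Django User):

    # Sketch mirroring tests.py: import an XML definition into a fresh Workflow.
    workflow = Workflow.objects.new_workflow(user)
    workflow.save()
    f = open('apps/oozie/src/oozie/test_data/0.4/test-mapreduce.xml')
    try:
      import_workflow(workflow, f.read(), schema_version=0.4)
    finally:
      f.close()
    workflow.save()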

+ 11 - 0
apps/oozie/src/oozie/models.py

@@ -989,6 +989,17 @@ class Fork(ControlFlow):
 
 
 class Decision(ControlFlow):
+  """
+  Essentially a fork where the end is not a join, but another node.
+  If two decisions share an end, the decision with the higher level takes the end 
+  and the lower level decision will not have an end.
+  IE:     D
+        D   N
+          E
+    The first 'D' will be assigned the end 'E'.
+    The second 'D' will not have an end.
+  This enables easier interpretation of visual hierarchy.
+  """
   node_type = 'decision'
 
   def get_child_end_or_none(self):
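
Since the importer records a decision's end as a Link named 'related' (see _save_links and _resolve_decision_relationships above), that end can be looked up roughly as sketched here. This is only an illustration; get_child_end_or_none above is the model's own accessor, and 'decision' is assumed to be a Decision instance:

    try:
      end = Link.objects.get(parent=decision, name='related').child
    except Link.DoesNotExist:
      end = None  # a nested decision may have no end of its own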

+ 17 - 14
apps/oozie/src/oozie/templates/editor/edit_workflow.mako

@@ -710,6 +710,20 @@ $.extend(DecisionModel.prototype, {
   child_links: []
 });
 
+var DistCPModel = ModelModule($);
+$.extend(DistCPModel.prototype, {
+  id: 0,
+  name: '',
+  description: '',
+  node_type: 'distcp',
+  workflow: 0,
+  job_properties: [],
+  prepares: [],
+  job_xml: '',
+  params: [],
+  child_links: []
+});
+
 var MapReduceModel = ModelModule($);
 $.extend(MapReduceModel.prototype, {
   id: 0,
@@ -844,20 +858,6 @@ $.extend(SshModel.prototype, {
   child_links: []
 });
 
-var DistCPModel = ModelModule($);
-$.extend(DistCPModel.prototype, {
-  id: 0,
-  name: '',
-  description: '',
-  node_type: 'distcp',
-  workflow: 0,
-  job_properties: [],
-  prepares: [],
-  job_xml: '',
-  params: [],
-  child_links: []
-});
-
 function nodeModelChooser(node_type) {
   switch(node_type) {
     case 'mapreduce':
@@ -1888,6 +1888,9 @@ var WorkflowModule = function($, NodeModelChooser, Node, ForkNode, DecisionNode,
               break;
             }
 
+            if ('main_class' in temp) {
+              console.log(temp.main_class());
+            }
             self.registry.add(temp.id(), temp);
           });
         }

+ 85 - 0
apps/oozie/src/oozie/templates/editor/import_workflow.mako

@@ -0,0 +1,85 @@
+## Licensed to Cloudera, Inc. under one
+## or more contributor license agreements.  See the NOTICE file
+## distributed with this work for additional information
+## regarding copyright ownership.  Cloudera, Inc. licenses this file
+## to you under the Apache License, Version 2.0 (the
+## "License"); you may not use this file except in compliance
+## with the License.  You may obtain a copy of the License at
+##
+##     http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+
+<%!
+  from desktop.views import commonheader, commonfooter
+  from django.utils.translation import ugettext as _
+%>
+
+<%namespace name="layout" file="../navigation-bar.mako" />
+<%namespace name="utils" file="../utils.inc.mako" />
+
+${ commonheader(_("Oozie App"), "oozie", user, "100px") }
+${ layout.menubar(section='workflows') }
+
+
+<div class="container-fluid">
+  <h1>${ _('Import Workflow') }</h1>
+
+    <div class="well">
+      <br/>
+    </div>
+
+    <div style="min-height:300px">
+      <form class="form-horizontal" id="workflowForm" action="${ url('oozie:import_workflow') }" method="POST">
+
+      <div class="row-fluid">
+        <div class="span2">
+        </div>
+        <div class="span8">
+          <fieldset>
+          ${ utils.render_field(workflow_form['name']) }
+          ${ utils.render_field(workflow_form['description']) }
+          ${ utils.render_field(workflow_form['definition']) }
+          ${ utils.render_field(workflow_form['schema_version']) }
+
+          <div class="control-group ">
+            <label class="control-label">
+              <a href="#" id="advanced-btn" onclick="$('#advanced-container').toggle('hide')">
+                <i class="icon-share-alt"></i> ${ _('advanced') }
+              </a>
+            </label>
+            <div class="controls"></div>
+          </div>
+
+            <div id="advanced-container" class="hide">
+              ${ utils.render_field(workflow_form['is_shared']) }
+              ${ utils.render_field(workflow_form['deployment_dir']) }
+              ${ utils.render_field(workflow_form['job_xml']) }
+           </div>
+
+           <div class="hide">
+             ${ workflow_form['schema_version'] }
+             ${ workflow_form['job_properties'] }
+             ${ workflow_form['parameters'] }
+         </div>
+         </fieldset>
+
+        <div class="span2"></div>
+        </div>
+      </div>
+
+      <div class="form-actions center">
+        <input class="btn btn-primary" type="submit" value="${ _('Save') }" />
+        <a class="btn" onclick="history.back()">${ _('Back') }</a>
+      </div>
+      </form>
+    </div>
+</div>
+
+${ utils.path_chooser_libs(True) }
+
+${ commonfooter(messages) }

+ 1 - 0
apps/oozie/src/oozie/templates/editor/list_workflows.mako

@@ -33,6 +33,7 @@ ${ layout.menubar(section='workflows') }
   <div class="well hueWell">
     <div class="btn-group pull-right">
       <a href="${ url('oozie:create_workflow') }" class="btn"><i class="icon-plus-sign"></i> ${ _('Create') }</a>
+      <a href="${ url('oozie:import_workflow') }" class="btn"><i class="icon-plus-sign"></i> ${ _('Import') }</a>
       % if currentuser.is_superuser:
         <a href="#installSamples" data-toggle="modal" class="btn"><i class="icon-download-alt"></i> ${ _('Setup Examples') }</a>
       % endif

+ 4 - 0
apps/oozie/src/oozie/test_data/0.4/test-basic.xml

@@ -0,0 +1,4 @@
+<workflow-app name="test-workflow">
+<start to="done"/>
+<end name="done"/>
+</workflow-app>

+ 292 - 0
apps/oozie/src/oozie/test_data/0.4/test-decision.xml

@@ -0,0 +1,292 @@
+<workflow-app name="Forks-copy" xmlns="uri:oozie:workflow:0.4">
+    <start to="decision-85"/>
+    <decision name="decision-85">
+        <switch>
+            <case to="Sleep-1">
+              ${1 gt 2}
+            </case>
+            <case to="Sleep-5">
+            </case>
+            <default to="end"/>
+        </switch>
+    </decision>
+    <action name="Sleep-1">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.reduce.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.key.class</name>
+                    <value>org.apache.hadoop.io.IntWritable</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.value.class</name>
+                    <value>org.apache.hadoop.io.NullWritable</value>
+                </property>
+                <property>
+                    <name>mapred.output.format.class</name>
+                    <value>org.apache.hadoop.mapred.lib.NullOutputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.input.format.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.partitioner.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.speculative.execution</name>
+                    <value>false</value>
+                </property>
+                <property>
+                    <name>sleep.job.map.sleep.time</name>
+                    <value>0</value>
+                </property>
+                <property>
+                    <name>sleep.job.reduce.sleep.time</name>
+                    <value>1</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="Sleep-10"/>
+        <error to="kill"/>
+    </action>
+    <action name="Sleep-10">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.reduce.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.key.class</name>
+                    <value>org.apache.hadoop.io.IntWritable</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.value.class</name>
+                    <value>org.apache.hadoop.io.NullWritable</value>
+                </property>
+                <property>
+                    <name>mapred.output.format.class</name>
+                    <value>org.apache.hadoop.mapred.lib.NullOutputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.input.format.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.partitioner.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.speculative.execution</name>
+                    <value>false</value>
+                </property>
+                <property>
+                    <name>sleep.job.map.sleep.time</name>
+                    <value>0</value>
+                </property>
+                <property>
+                    <name>sleep.job.reduce.sleep.time</name>
+                    <value>10</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="end"/>
+        <error to="kill"/>
+    </action>
+    <action name="Sleep-5">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.reduce.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.key.class</name>
+                    <value>org.apache.hadoop.io.IntWritable</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.value.class</name>
+                    <value>org.apache.hadoop.io.NullWritable</value>
+                </property>
+                <property>
+                    <name>mapred.output.format.class</name>
+                    <value>org.apache.hadoop.mapred.lib.NullOutputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.input.format.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.partitioner.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.speculative.execution</name>
+                    <value>false</value>
+                </property>
+                <property>
+                    <name>sleep.job.map.sleep.time</name>
+                    <value>0</value>
+                </property>
+                <property>
+                    <name>sleep.job.reduce.sleep.time</name>
+                    <value>5</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="fork-82"/>
+        <error to="kill"/>
+    </action>
+    <fork name="fork-82">
+        <path start="Sleep-3" />
+        <path start="Sleep-4" />
+    </fork>
+    <action name="Sleep-3">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.reduce.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.key.class</name>
+                    <value>org.apache.hadoop.io.IntWritable</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.value.class</name>
+                    <value>org.apache.hadoop.io.NullWritable</value>
+                </property>
+                <property>
+                    <name>mapred.output.format.class</name>
+                    <value>org.apache.hadoop.mapred.lib.NullOutputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.input.format.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.partitioner.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.speculative.execution</name>
+                    <value>false</value>
+                </property>
+                <property>
+                    <name>sleep.job.map.sleep.time</name>
+                    <value>0</value>
+                </property>
+                <property>
+                    <name>sleep.job.reduce.sleep.time</name>
+                    <value>3</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="join-83"/>
+        <error to="kill"/>
+    </action>
+    <action name="Sleep-4">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.reduce.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.key.class</name>
+                    <value>org.apache.hadoop.io.IntWritable</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.value.class</name>
+                    <value>org.apache.hadoop.io.NullWritable</value>
+                </property>
+                <property>
+                    <name>mapred.output.format.class</name>
+                    <value>org.apache.hadoop.mapred.lib.NullOutputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.input.format.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.partitioner.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.speculative.execution</name>
+                    <value>false</value>
+                </property>
+                <property>
+                    <name>sleep.job.map.sleep.time</name>
+                    <value>0</value>
+                </property>
+                <property>
+                    <name>sleep.job.reduce.sleep.time</name>
+                    <value>4</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="join-83"/>
+        <error to="kill"/>
+    </action>
+    <join name="join-83" to="end"/>
+    <kill name="kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>

+ 20 - 0
apps/oozie/src/oozie/test_data/0.4/test-distcp.0.1.xml

@@ -0,0 +1,20 @@
+<workflow-app name="DistCp" xmlns="uri:oozie:workflow:0.4">
+    <start to="DistCp"/>
+    <action name="DistCp">
+        <distcp xmlns="uri:oozie:distcp-action:0.1">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+              <arg>-overwrite</arg>
+              <arg>-m</arg>
+              <arg>${MAP_NUMBER}</arg>
+              <arg>/user/hue/oozie/workspaces/data</arg>
+              <arg>${OUTPUT}</arg>
+        </distcp>
+        <ok to="end"/>
+        <error to="kill"/>
+    </action>
+    <kill name="kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>

+ 287 - 0
apps/oozie/src/oozie/test_data/0.4/test-forks.xml

@@ -0,0 +1,287 @@
+<workflow-app name="Forks" xmlns="uri:oozie:workflow:0.4">
+    <start to="fork-34"/>
+    <fork name="fork-34">
+        <path start="Sleep-1" />
+        <path start="Sleep-5" />
+    </fork>
+    <action name="Sleep-1">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.reduce.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.key.class</name>
+                    <value>org.apache.hadoop.io.IntWritable</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.value.class</name>
+                    <value>org.apache.hadoop.io.NullWritable</value>
+                </property>
+                <property>
+                    <name>mapred.output.format.class</name>
+                    <value>org.apache.hadoop.mapred.lib.NullOutputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.input.format.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.partitioner.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.speculative.execution</name>
+                    <value>false</value>
+                </property>
+                <property>
+                    <name>sleep.job.map.sleep.time</name>
+                    <value>0</value>
+                </property>
+                <property>
+                    <name>sleep.job.reduce.sleep.time</name>
+                    <value>1</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="Sleep-10"/>
+        <error to="kill"/>
+    </action>
+    <action name="Sleep-10">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.reduce.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.key.class</name>
+                    <value>org.apache.hadoop.io.IntWritable</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.value.class</name>
+                    <value>org.apache.hadoop.io.NullWritable</value>
+                </property>
+                <property>
+                    <name>mapred.output.format.class</name>
+                    <value>org.apache.hadoop.mapred.lib.NullOutputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.input.format.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.partitioner.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.speculative.execution</name>
+                    <value>false</value>
+                </property>
+                <property>
+                    <name>sleep.job.map.sleep.time</name>
+                    <value>0</value>
+                </property>
+                <property>
+                    <name>sleep.job.reduce.sleep.time</name>
+                    <value>10</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="join-35"/>
+        <error to="kill"/>
+    </action>
+    <action name="Sleep-5">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.reduce.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.key.class</name>
+                    <value>org.apache.hadoop.io.IntWritable</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.value.class</name>
+                    <value>org.apache.hadoop.io.NullWritable</value>
+                </property>
+                <property>
+                    <name>mapred.output.format.class</name>
+                    <value>org.apache.hadoop.mapred.lib.NullOutputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.input.format.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.partitioner.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.speculative.execution</name>
+                    <value>false</value>
+                </property>
+                <property>
+                    <name>sleep.job.map.sleep.time</name>
+                    <value>0</value>
+                </property>
+                <property>
+                    <name>sleep.job.reduce.sleep.time</name>
+                    <value>5</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="fork-38"/>
+        <error to="kill"/>
+    </action>
+    <fork name="fork-38">
+        <path start="Sleep-3" />
+        <path start="Sleep-4" />
+    </fork>
+    <action name="Sleep-3">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.reduce.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.key.class</name>
+                    <value>org.apache.hadoop.io.IntWritable</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.value.class</name>
+                    <value>org.apache.hadoop.io.NullWritable</value>
+                </property>
+                <property>
+                    <name>mapred.output.format.class</name>
+                    <value>org.apache.hadoop.mapred.lib.NullOutputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.input.format.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.partitioner.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.speculative.execution</name>
+                    <value>false</value>
+                </property>
+                <property>
+                    <name>sleep.job.map.sleep.time</name>
+                    <value>0</value>
+                </property>
+                <property>
+                    <name>sleep.job.reduce.sleep.time</name>
+                    <value>3</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="join-39"/>
+        <error to="kill"/>
+    </action>
+    <action name="Sleep-4">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.reduce.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.key.class</name>
+                    <value>org.apache.hadoop.io.IntWritable</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.value.class</name>
+                    <value>org.apache.hadoop.io.NullWritable</value>
+                </property>
+                <property>
+                    <name>mapred.output.format.class</name>
+                    <value>org.apache.hadoop.mapred.lib.NullOutputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.input.format.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.partitioner.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.speculative.execution</name>
+                    <value>false</value>
+                </property>
+                <property>
+                    <name>sleep.job.map.sleep.time</name>
+                    <value>0</value>
+                </property>
+                <property>
+                    <name>sleep.job.reduce.sleep.time</name>
+                    <value>4</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="join-39"/>
+        <error to="kill"/>
+    </action>
+    <join name="join-39" to="join-35"/>
+    <join name="join-35" to="end"/>
+    <kill name="kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>

+ 29 - 0
apps/oozie/src/oozie/test_data/0.4/test-java.xml

@@ -0,0 +1,29 @@
+<workflow-app name="Sequential Java" xmlns="uri:oozie:workflow:0.4">
+    <start to="TeraGenWorkflow"/>
+    <action name="TeraGenWorkflow">
+        <java>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <main-class>org.apache.hadoop.examples.terasort.TeraGen</main-class>
+            <arg>${records}</arg>
+            <arg>${output_dir}/teragen</arg>
+        </java>
+        <ok to="TeraSort"/>
+        <error to="kill"/>
+    </action>
+    <action name="TeraSort">
+        <java>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <main-class>org.apache.hadoop.examples.terasort.TeraSort</main-class>
+            <arg>${output_dir}/teragen</arg>
+            <arg>${output_dir}/terasort</arg>
+        </java>
+        <ok to="end"/>
+        <error to="kill"/>
+    </action>
+    <kill name="kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>

+ 61 - 0
apps/oozie/src/oozie/test_data/0.4/test-mapreduce.xml

@@ -0,0 +1,61 @@
+<workflow-app name="MapReduce" xmlns="uri:oozie:workflow:0.4">
+    <start to="Sleep-1"/>
+    <action name="Sleep-1">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.reduce.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.key.class</name>
+                    <value>org.apache.hadoop.io.IntWritable</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.value.class</name>
+                    <value>org.apache.hadoop.io.NullWritable</value>
+                </property>
+                <property>
+                    <name>mapred.output.format.class</name>
+                    <value>org.apache.hadoop.mapred.lib.NullOutputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.input.format.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.partitioner.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.speculative.execution</name>
+                    <value>false</value>
+                </property>
+                <property>
+                    <name>sleep.job.map.sleep.time</name>
+                    <value>0</value>
+                </property>
+                <property>
+                    <name>sleep.job.reduce.sleep.time</name>
+                    <value>1</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="end"/>
+        <error to="kill"/>
+    </action>
+    <kill name="kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>

+ 20 - 0
apps/oozie/src/oozie/test_data/0.4/test-pig.xml

@@ -0,0 +1,20 @@
+<workflow-app name="Pig" xmlns="uri:oozie:workflow:0.4">
+    <start to="Pig"/>
+    <action name="Pig">
+        <pig>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <script>aggregate.pig</script>
+              <argument>-param</argument>
+              <argument>INPUT=/user/hue/oozie/workspaces/data</argument>
+              <argument>-param</argument>
+              <argument>OUTPUT=${output}</argument>
+        </pig>
+        <ok to="end"/>
+        <error to="kill"/>
+    </action>
+    <kill name="kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>

+ 21 - 0
apps/oozie/src/oozie/test_data/0.4/test-sqoop.0.2.xml

@@ -0,0 +1,21 @@
+<workflow-app name="Sqoop" xmlns="uri:oozie:workflow:0.4">
+    <start to="Sqoop"/>
+    <action name="Sqoop">
+        <sqoop xmlns="uri:oozie:sqoop-action:0.2">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <prepare>
+                  <delete path="${nameNode}${output}"/>
+            </prepare>
+            <command>import --connect jdbc:hsqldb:file:db.hsqldb --table TT --target-dir ${output} -m 1</command>
+            <file>db.hsqldb.properties#db.hsqldb.properties</file>
+            <file>db.hsqldb.script#db.hsqldb.script</file>
+        </sqoop>
+        <ok to="end"/>
+        <error to="kill"/>
+    </action>
+    <kill name="kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>

+ 120 - 0
apps/oozie/src/oozie/tests.py

@@ -41,6 +41,7 @@ from oozie.models import Workflow, Node, Kill, Link, Job, Coordinator, History,\
   find_parameters, NODE_TYPES
 from oozie.conf import SHARE_JOBS
 from oozie.utils import workflow_to_dict, model_to_dict
+from oozie.import_workflow import import_workflow
 
 
 LOG = logging.getLogger(__name__)
@@ -130,6 +131,7 @@ class OozieMockBase(object):
 
     self.c = make_logged_in_client(is_superuser=False)
     grant_access("test", "test", "oozie")
+    self.user = User.objects.get(username='test')
     self.wf = create_workflow(self.c)
 
 
@@ -775,6 +777,124 @@ class TestEditor(OozieMockBase):
                  coord.find_all_parameters())
 
 
+  def test_import_workflow_basic(self):
+    workflow = Workflow.objects.new_workflow(self.user)
+    workflow.save()
+    f = open('apps/oozie/src/oozie/test_data/0.4/test-basic.xml')
+    import_workflow(workflow, f.read(), schema_version=0.4)
+    f.close()
+    workflow.save()
+    assert_equal(2, len(Node.objects.filter(workflow=workflow)))
+    assert_equal(1, len(Link.objects.filter(parent__workflow=workflow)))
+    assert_equal('done', Node.objects.get(workflow=workflow, node_type='end').name)
+    workflow.delete()
+
+
+  def test_import_workflow_decision(self):
+    workflow = Workflow.objects.new_workflow(self.user)
+    workflow.save()
+    f = open('apps/oozie/src/oozie/test_data/0.4/test-decision.xml')
+    import_workflow(workflow, f.read(), schema_version=0.4)
+    f.close()
+    workflow.save()
+    assert_equal(11, len(Node.objects.filter(workflow=workflow)))
+    assert_equal(19, len(Link.objects.filter(parent__workflow=workflow)))
+    assert_equal(1, len(Link.objects.filter(parent__workflow=workflow, parent__node_type='decision', comment='${1 gt 2}', name='start')))
+    assert_equal(1, len(Link.objects.filter(parent__workflow=workflow, parent__node_type='decision', comment='', name='start')))
+    assert_equal(1, len(Link.objects.filter(parent__workflow=workflow, parent__node_type='decision', name='default')))
+    assert_equal(1, len(Link.objects.filter(parent__workflow=workflow, parent__node_type='decision', child__node_type='end', name='related')))
+    workflow.delete()
+
+
+  def test_import_workflow_distcp(self):
+    workflow = Workflow.objects.new_workflow(self.user)
+    workflow.save()
+    f = open('apps/oozie/src/oozie/test_data/0.4/test-distcp.0.1.xml')
+    import_workflow(workflow, f.read(), schema_version=0.4)
+    f.close()
+    workflow.save()
+    assert_equal(4, len(Node.objects.filter(workflow=workflow)))
+    assert_equal(3, len(Link.objects.filter(parent__workflow=workflow)))
+    assert_equal('[{"type":"arg","value":"-overwrite"},{"type":"arg","value":"-m"},{"type":"arg","value":"${MAP_NUMBER}"},{"type":"arg","value":"/user/hue/oozie/workspaces/data"},{"type":"arg","value":"${OUTPUT}"}]', Node.objects.get(workflow=workflow, node_type='distcp').get_full_node().params)
+    workflow.delete()
+
+
+  def test_import_workflow_forks(self):
+    workflow = Workflow.objects.new_workflow(self.user)
+    workflow.save()
+    f = open('apps/oozie/src/oozie/test_data/0.4/test-forks.xml')
+    import_workflow(workflow, f.read(), schema_version=0.4)
+    f.close()
+    workflow.save()
+    assert_equal(12, len(Node.objects.filter(workflow=workflow)))
+    assert_equal(19, len(Link.objects.filter(parent__workflow=workflow)))
+    assert_equal(6, len(Link.objects.filter(parent__workflow=workflow, parent__node_type='fork')))
+    assert_equal(4, len(Link.objects.filter(parent__workflow=workflow, parent__node_type='fork', name='start')))
+    assert_equal(2, len(Link.objects.filter(parent__workflow=workflow, parent__node_type='fork', child__node_type='join', name='related')))
+    workflow.delete()
+
+
+  def test_import_workflow_mapreduce(self):
+    workflow = Workflow.objects.new_workflow(self.user)
+    workflow.save()
+    f = open('apps/oozie/src/oozie/test_data/0.4/test-mapreduce.xml')
+    import_workflow(workflow, f.read(), schema_version=0.4)
+    f.close()
+    workflow.save()
+    assert_equal(4, len(Node.objects.filter(workflow=workflow)))
+    assert_equal(3, len(Link.objects.filter(parent__workflow=workflow)))
+    assert_equal('[{"name":"mapred.reduce.tasks","value":"1"},{"name":"mapred.mapper.class","value":"org.apache.hadoop.examples.SleepJob"},{"name":"mapred.reducer.class","value":"org.apache.hadoop.examples.SleepJob"},{"name":"mapred.mapoutput.key.class","value":"org.apache.hadoop.io.IntWritable"},{"name":"mapred.mapoutput.value.class","value":"org.apache.hadoop.io.NullWritable"},{"name":"mapred.output.format.class","value":"org.apache.hadoop.mapred.lib.NullOutputFormat"},{"name":"mapred.input.format.class","value":"org.apache.hadoop.examples.SleepJob$SleepInputFormat"},{"name":"mapred.partitioner.class","value":"org.apache.hadoop.examples.SleepJob"},{"name":"mapred.speculative.execution","value":"false"},{"name":"sleep.job.map.sleep.time","value":"0"},{"name":"sleep.job.reduce.sleep.time","value":"1"}]', Node.objects.get(workflow=workflow, node_type='mapreduce').get_full_node().job_properties)
+    workflow.delete()
+
+
+  def test_import_workflow_pig(self):
+    workflow = Workflow.objects.new_workflow(self.user)
+    workflow.save()
+    f = open('apps/oozie/src/oozie/test_data/0.4/test-pig.xml')
+    import_workflow(workflow, f.read(), schema_version=0.4)
+    f.close()
+    workflow.save()
+    node = Node.objects.get(workflow=workflow, node_type='pig').get_full_node()
+    assert_equal(4, len(Node.objects.filter(workflow=workflow)))
+    assert_equal(3, len(Link.objects.filter(parent__workflow=workflow)))
+    assert_equal('aggregate.pig', node.script_path)
+    assert_equal('[{"type":"argument","value":"-param"},{"type":"argument","value":"INPUT=/user/hue/oozie/workspaces/data"},{"type":"argument","value":"-param"},{"type":"argument","value":"OUTPUT=${output}"}]', node.params)
+    workflow.delete()
+
+
+  def test_import_workflow_sqoop(self):
+    workflow = Workflow.objects.new_workflow(self.user)
+    workflow.save()
+    f = open('apps/oozie/src/oozie/test_data/0.4/test-sqoop.0.2.xml')
+    import_workflow(workflow, f.read(), schema_version=0.4)
+    f.close()
+    workflow.save()
+    assert_equal(4, len(Node.objects.filter(workflow=workflow)))
+    assert_equal(3, len(Link.objects.filter(parent__workflow=workflow)))
+    node = Node.objects.get(workflow=workflow, node_type='sqoop').get_full_node()
+    assert_equal('["db.hsqldb.properties#db.hsqldb.properties","db.hsqldb.script#db.hsqldb.script"]', node.files)
+    assert_equal('import --connect jdbc:hsqldb:file:db.hsqldb --table TT --target-dir ${output} -m 1', node.script_path)
+    workflow.delete()
+
+
+  def test_import_workflow_java(self):
+    workflow = Workflow.objects.new_workflow(self.user)
+    workflow.save()
+    f = open('apps/oozie/src/oozie/test_data/0.4/test-java.xml')
+    import_workflow(workflow, f.read(), schema_version=0.4)
+    f.close()
+    workflow.save()
+    assert_equal(5, len(Node.objects.filter(workflow=workflow)))
+    assert_equal(5, len(Link.objects.filter(parent__workflow=workflow)))
+    nodes = [Node.objects.filter(workflow=workflow, node_type='java')[0].get_full_node(),
+             Node.objects.filter(workflow=workflow, node_type='java')[1].get_full_node()]
+    assert_equal('org.apache.hadoop.examples.terasort.TeraGen', nodes[0].main_class)
+    assert_equal('["${records}","${output_dir}/teragen"]', nodes[0].args)
+    assert_equal('org.apache.hadoop.examples.terasort.TeraSort', nodes[1].main_class)
+    assert_equal('["${output_dir}/teragen","${output_dir}/terasort"]', nodes[1].args)
+    workflow.delete()
+
+
 class TestPermissions(OozieBase):
 
   def setUp(self):

+ 1 - 0
apps/oozie/src/oozie/urls.py

@@ -39,6 +39,7 @@ urlpatterns += patterns(
   url(r'^schedule_workflow/(?P<workflow>\d+)$', 'schedule_workflow', name='schedule_workflow'),
 
   url(r'^import_action/(?P<workflow>\d+)/(?P<parent_action_id>\d+)$', 'import_action', name='import_action'),
+  url(r'^import_workflow/$', 'import_workflow', name='import_workflow'),
 
   url(r'^list_coordinators/(?P<workflow_id>[-\w]+)?$', 'list_coordinators', name='list_coordinators'),
   url(r'^create_coordinator/(?P<workflow>[-\w]+)?$', 'create_coordinator', name='create_coordinator'),

+ 36 - 2
apps/oozie/src/oozie/views/editor.py

@@ -21,7 +21,6 @@ except ImportError:
   import simplejson as json
 import logging
 
-
 from django.core.urlresolvers import reverse
 from django.db.models import Q
 from django.forms.formsets import formset_factory
@@ -42,13 +41,15 @@ from oozie.conf import SHARE_JOBS
 from oozie.decorators import check_job_access_permission, check_job_edition_permission,\
                              check_action_access_permission, check_action_edition_permission,\
                              check_dataset_access_permission, check_dataset_edition_permission
+from oozie.import_workflow import import_workflow as _import_workflow
 from oozie.import_jobsub import convert_jobsub_design
 from oozie.management.commands import oozie_setup
 from oozie.models import Job, Workflow, History, Coordinator,Mapreduce, Java, Streaming,\
                          Dataset, DataInput, DataOutput, ACTION_TYPES
 from oozie.forms import WorkflowForm, CoordinatorForm, DatasetForm,\
   DataInputForm, DataInputSetForm, DataOutputForm, DataOutputSetForm, LinkForm,\
-  DefaultLinkForm, design_form_by_type, ImportJobsubDesignForm, ParameterForm
+  DefaultLinkForm, design_form_by_type, ImportJobsubDesignForm, ParameterForm,\
+  ImportWorkflowForm
 
 
 LOG = logging.getLogger(__name__)
@@ -580,3 +581,36 @@ def setup_app(request):
     raise PopupException(_('The app setup could not complete.'), detail=e)
   return redirect(reverse('oozie:list_workflows'))
 
+
+def import_workflow(request):
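+  """
+  Show the import form; on POST, validate it, save a new workflow and hand the
+  submitted definition to the importer for the selected schema version.
+  On success, redirect to the workflow editor; on failure, destroy the workflow
+  again and surface the error in a popup.
+  """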
+  workflow = Workflow.objects.new_workflow(request.user)
+
+  if request.method == 'POST':
+    workflow_form = ImportWorkflowForm(request.POST, instance=workflow)
+
+    if workflow_form.is_valid():
+      workflow.save()
+
+      workflow_definition = workflow_form.cleaned_data['definition']
+      schema_version = workflow_form.cleaned_data['schema_version']
+
+      try:
+        _import_workflow(workflow=workflow, workflow_definition=workflow_definition, schema_version=schema_version)
+        request.info(_('Workflow imported'))
+        return redirect(reverse('oozie:edit_workflow', kwargs={'workflow': workflow.id}))
+
+      except Exception, e:
+        request.error(_('Could not import workflow: %s') % e)
+        Workflow.objects.destroy(workflow, request.fs)
+        raise PopupException(_('Could not import workflow.'), detail=e)
+
+    else:
+      request.error(_('Errors on the form: %s') % workflow_form.errors)
+
+  else:
+    workflow_form = ImportWorkflowForm(instance=workflow)
+
+  return render('editor/import_workflow.mako', request, {
+    'workflow_form': workflow_form,
+    'workflow': workflow,
+  })

+ 35 - 0
apps/oozie/src/oozie/xslt/0.4/action.xslt

@@ -0,0 +1,35 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="extensions/distcp.0.1.xslt"/>
+<xsl:import href="extensions/hive.0.1.xslt"/>
+<xsl:import href="extensions/hive.0.2.xslt"/>
+<xsl:import href="extensions/shell.0.1.xslt"/>
+<xsl:import href="extensions/sqoop.0.1.xslt"/>
+<xsl:import href="extensions/sqoop.0.2.xslt"/>
+<xsl:import href="extensions/ssh.0.1.xslt"/>
+<xsl:import href="nodes/java.xslt"/>
+<xsl:import href="nodes/mapreduce.xslt"/>
+<xsl:import href="nodes/pig.xslt"/>
+<xsl:import href="nodes/streaming.xslt"/>
+
+<xsl:template match="action">
+
+  <object model="oozie.node" pk="0">
+
+    <field name="name" type="CharField">
+      <xsl:value-of select="@name"/>
+    </field>
+    <field name="node_type" type="CharField">
+      <xsl:value-of select="name(.)"/>
+    </field>
+
+  </object>
+
+  <xsl:apply-templates select="*"/>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 30 - 0
apps/oozie/src/oozie/xslt/0.4/control.xslt

@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="decision.xslt"/>
+<xsl:import href="end.xslt"/>
+<xsl:import href="fork.xslt"/>
+<xsl:import href="join.xslt"/>
+<xsl:import href="kill.xslt"/>
+<xsl:import href="start.xslt"/>
+
+<xsl:template match="start | end | decision | fork | join | kill">
+
+  <object model="oozie.node" pk="0">
+
+    <field name="name" type="CharField">
+      <xsl:value-of select="@name"/>
+    </field>
+    <field name="node_type" type="CharField">
+      <xsl:value-of select="name(.)"/>
+    </field>
+
+  </object>
+
+  <xsl:apply-imports/>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 37 - 0
apps/oozie/src/oozie/xslt/0.4/extensions/distcp.0.1.xslt

@@ -0,0 +1,37 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="../fields/job_properties.xslt"/>
+<xsl:import href="../fields/job_xml.xslt"/>
+<xsl:import href="../fields/params.xslt"/>
+<xsl:import href="../fields/prepares.xslt"/>
+
+<xsl:template match="distcp:distcp" xmlns:distcp="uri:oozie:distcp-action:0.1">
+
+  <object model="oozie.distcp" pk="0">
+
+    <xsl:call-template name="job_properties"/>
+    <xsl:call-template name="job_xml"/>
+    <field name="params" type="CharField">
+      <xsl:text>[</xsl:text>
+      <xsl:for-each select="*[local-name()='arg']">
+        <xsl:choose>
+          <xsl:when test="position() &lt; last()">
+            <xsl:text><![CDATA[{"type":"arg","value":"]]></xsl:text><xsl:value-of select="." /><xsl:text><![CDATA["},]]></xsl:text>
+          </xsl:when>
+          <xsl:otherwise>
+            <xsl:text><![CDATA[{"type":"arg","value":"]]></xsl:text><xsl:value-of select="." /><xsl:text><![CDATA["}]]></xsl:text>
+          </xsl:otherwise>
+        </xsl:choose>
+      </xsl:for-each>
+      <xsl:text>]</xsl:text>
+    </field>
+    <xsl:call-template name="prepares"/>
+
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 30 - 0
apps/oozie/src/oozie/xslt/0.4/extensions/hive.0.1.xslt

@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="../fields/archives.xslt"/>
+<xsl:import href="../fields/files.xslt"/>
+<xsl:import href="../fields/job_properties.xslt"/>
+<xsl:import href="../fields/job_xml.xslt"/>
+<xsl:import href="../fields/params.xslt"/>
+<xsl:import href="../fields/prepares.xslt"/>
+<xsl:import href="../fields/script_path.xslt"/>
+
+<xsl:template match="hive:hive" xmlns:hive="uri:oozie:hive-action:0.1">
+
+  <object model="oozie.hive" pk="0">
+
+    <xsl:call-template name="archives"/>
+    <xsl:call-template name="files"/>
+    <xsl:call-template name="job_properties"/>
+    <xsl:call-template name="job_xml"/>
+    <xsl:call-template name="params"/>
+    <xsl:call-template name="prepares"/>
+    <xsl:call-template name="script_path"/>
+
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 30 - 0
apps/oozie/src/oozie/xslt/0.4/extensions/hive.0.2.xslt

@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="../fields/archives.xslt"/>
+<xsl:import href="../fields/files.xslt"/>
+<xsl:import href="../fields/job_properties.xslt"/>
+<xsl:import href="../fields/job_xml.xslt"/>
+<xsl:import href="../fields/params.xslt"/>
+<xsl:import href="../fields/prepares.xslt"/>
+<xsl:import href="../fields/script_path.xslt"/>
+
+<xsl:template match="hive:hive" xmlns:hive="uri:oozie:hive-action:0.2">
+
+  <object model="oozie.hive" pk="0">
+
+    <xsl:call-template name="archives"/>
+    <xsl:call-template name="files"/>
+    <xsl:call-template name="job_properties"/>
+    <xsl:call-template name="job_xml"/>
+    <xsl:call-template name="params"/>
+    <xsl:call-template name="prepares"/>
+    <xsl:call-template name="script_path"/>
+
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 32 - 0
apps/oozie/src/oozie/xslt/0.4/extensions/shell.0.1.xslt

@@ -0,0 +1,32 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="../fields/archives.xslt"/>
+<xsl:import href="../fields/capture_output.xslt"/>
+<xsl:import href="../fields/command.xslt"/>
+<xsl:import href="../fields/files.xslt"/>
+<xsl:import href="../fields/job_properties.xslt"/>
+<xsl:import href="../fields/job_xml.xslt"/>
+<xsl:import href="../fields/params.xslt"/>
+<xsl:import href="../fields/prepares.xslt"/>
+
+<xsl:template match="shell:shell" xmlns:shell="uri:oozie:shell-action:0.1">
+
+  <object model="oozie.shell" pk="0">
+
+    <xsl:call-template name="archives"/>
+    <xsl:call-template name="capture_output"/>
+    <xsl:call-template name="command"/>
+    <xsl:call-template name="files"/>
+    <xsl:call-template name="job_properties"/>
+    <xsl:call-template name="job_xml"/>
+    <xsl:call-template name="params"/>
+    <xsl:call-template name="prepares"/>
+
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 30 - 0
apps/oozie/src/oozie/xslt/0.4/extensions/sqoop.0.1.xslt

@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="../fields/archives.xslt"/>
+<xsl:import href="../fields/files.xslt"/>
+<xsl:import href="../fields/job_properties.xslt"/>
+<xsl:import href="../fields/job_xml.xslt"/>
+<xsl:import href="../fields/params.xslt"/>
+<xsl:import href="../fields/prepares.xslt"/>
+<xsl:import href="../fields/script_path.xslt"/>
+
+<xsl:template match="sqoop:sqoop" xmlns:sqoop="uri:oozie:sqoop-action:0.1">
+
+  <object model="oozie.sqoop" pk="0">
+
+    <xsl:call-template name="archives"/>
+    <xsl:call-template name="files"/>
+    <xsl:call-template name="job_properties"/>
+    <xsl:call-template name="job_xml"/>
+    <xsl:call-template name="params"/>
+    <xsl:call-template name="prepares"/>
+    <xsl:call-template name="script_path"/>
+
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 31 - 0
apps/oozie/src/oozie/xslt/0.4/extensions/sqoop.0.2.xslt

@@ -0,0 +1,31 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="../fields/archives.xslt"/>
+<xsl:import href="../fields/files.xslt"/>
+<xsl:import href="../fields/job_properties.xslt"/>
+<xsl:import href="../fields/job_xml.xslt"/>
+<xsl:import href="../fields/params.xslt"/>
+<xsl:import href="../fields/prepares.xslt"/>
+
+<xsl:template match="sqoop:sqoop" xmlns:sqoop="uri:oozie:sqoop-action:0.2">
+
+  <object model="oozie.sqoop" pk="0">
+
+    <xsl:call-template name="archives"/>
+    <xsl:call-template name="files"/>
+    <xsl:call-template name="job_properties"/>
+    <xsl:call-template name="job_xml"/>
+    <xsl:call-template name="params"/>
+    <xsl:call-template name="prepares"/>
+    <field name="script_path" type="CharField">
+      <xsl:value-of select="*[local-name()='command']"/>
+    </field>
+
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 26 - 0
apps/oozie/src/oozie/xslt/0.4/extensions/ssh.0.1.xslt

@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="../fields/capture_output.xslt"/>
+<xsl:import href="../fields/command.xslt"/>
+<xsl:import href="../fields/host.xslt"/>
+<xsl:import href="../fields/params.xslt"/>
+<xsl:import href="../fields/user.xslt"/>
+
+<xsl:template match="ssh:ssh" xmlns:ssh="uri:oozie:ssh-action:0.1">
+
+  <object model="oozie.ssh" pk="0">
+
+    <xsl:call-template name="capture_output"/>
+    <xsl:call-template name="command"/>
+    <xsl:call-template name="host"/>
+    <xsl:call-template name="params"/>
+    <xsl:call-template name="user"/>
+
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 13 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/decision.xslt

@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template match="decision">
+
+  <object model="oozie.decision" pk="0">
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 13 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/end.xslt

@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template match="end">
+
+  <object model="oozie.end" pk="0">
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 20 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/archives.xslt

@@ -0,0 +1,20 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="archives">
+
+  <field name="archives" type="TextField">
+    <xsl:text>[</xsl:text>
+    <xsl:for-each select="*[local-name()='archive']">
+      <xsl:text><![CDATA["]]></xsl:text><xsl:value-of select="." /><xsl:text><![CDATA["]]></xsl:text>
+      <xsl:if test="position() &lt; last()">
+        <xsl:text>,</xsl:text>
+      </xsl:if>
+    </xsl:for-each>
+    <xsl:text>]</xsl:text>
+  </field>
+
+</xsl:template>
+
+</xsl:stylesheet>

+ 20 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/args.xslt

@@ -0,0 +1,20 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="args">
+
+  <field name="args" type="CharField">
+    <xsl:text>[</xsl:text>
+    <xsl:for-each select="*[local-name()='arg']">
+      <xsl:text><![CDATA["]]></xsl:text><xsl:value-of select="." /><xsl:text><![CDATA["]]></xsl:text>
+      <xsl:if test="position() &lt; last()">
+        <xsl:text>,</xsl:text>
+      </xsl:if>
+    </xsl:for-each>
+    <xsl:text>]</xsl:text>
+  </field>
+
+</xsl:template>
+
+</xsl:stylesheet>

+ 24 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/arguments.xslt

@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="arguments">
+
+  <field name="params" type="TextField">
+    <xsl:text>[</xsl:text>
+    <xsl:for-each select="*[local-name()='argument']">
+      <xsl:choose>
+        <xsl:when test="position() &lt; last()">
+          <xsl:text><![CDATA[{"type":"]]></xsl:text><xsl:value-of select="local-name()" /><xsl:text><![CDATA[","value":"]]></xsl:text><xsl:value-of select="." /><xsl:text><![CDATA["},]]></xsl:text>
+        </xsl:when>
+        <xsl:otherwise>
+          <xsl:text><![CDATA[{"type":"]]></xsl:text><xsl:value-of select="local-name()" /><xsl:text><![CDATA[","value":"]]></xsl:text><xsl:value-of select="." /><xsl:text><![CDATA["}]]></xsl:text>
+        </xsl:otherwise>
+      </xsl:choose>
+    </xsl:for-each>
+    <xsl:text>]</xsl:text>
+  </field>
+
+</xsl:template>
+
+</xsl:stylesheet>

+ 21 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/capture_output.xslt

@@ -0,0 +1,21 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="capture_output">
+
+  <field name="capture_output" type="BooleanField">
+    <xsl:choose>
+      <xsl:when test="*[local-name()='exec']">
+        True
+      </xsl:when>
+      <xsl:otherwise>
+        False
+      </xsl:otherwise>
+    </xsl:choose>
+  </field>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 14 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/command.xslt

@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="command">
+
+  <field name="command" type="CharField">
+    <xsl:value-of select="*[local-name()='command']"/>
+  </field>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 20 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/files.xslt

@@ -0,0 +1,20 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="files">
+
+  <field name="files" type="TextField">
+    <xsl:text>[</xsl:text>
+    <xsl:for-each select="*[local-name()='file']">
+      <xsl:text><![CDATA["]]></xsl:text><xsl:value-of select="." /><xsl:text><![CDATA["]]></xsl:text>
+      <xsl:if test="position() &lt; last()">
+        <xsl:text>,</xsl:text>
+      </xsl:if>
+    </xsl:for-each>
+    <xsl:text>]</xsl:text>
+  </field>
+
+</xsl:template>
+
+</xsl:stylesheet>

+ 14 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/host.xslt

@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="host">
+
+  <field name="host" type="CharField">
+    <xsl:value-of select="*[local-name()='host']"/>
+  </field>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 14 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/jar_path.xslt

@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="jar_path">
+
+  <field name="jar_path" type="CharField">
+    <xsl:value-of select="*[local-name()='jar-path']"/>
+  </field>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 13 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/java_opts.xslt

@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="java_opts">
+
+  <field name="java_opts" type="CharField">
+    <xsl:value-of select="*[local-name()='java-opts']"/>
+  </field>
+
+</xsl:template>
+
+</xsl:stylesheet>

+ 24 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/job_properties.xslt

@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="job_properties">
+
+  <field name="job_properties" type="TextField">
+    <xsl:text>[</xsl:text>
+    <xsl:for-each select="*[local-name()='configuration']/*[local-name()='property']">
+      <xsl:choose>
+        <xsl:when test="position() &lt; last()">
+          <xsl:text><![CDATA[{"name":"]]></xsl:text><xsl:value-of select="*[local-name()='name']" /><xsl:text><![CDATA[","value":"]]></xsl:text><xsl:value-of select="*[local-name()='value']" /><xsl:text><![CDATA["},]]></xsl:text>
+        </xsl:when>
+        <xsl:otherwise>
+          <xsl:text><![CDATA[{"name":"]]></xsl:text><xsl:value-of select="*[local-name()='name']" /><xsl:text><![CDATA[","value":"]]></xsl:text><xsl:value-of select="*[local-name() ='value']" /><xsl:text><![CDATA["}]]></xsl:text>
+        </xsl:otherwise>
+      </xsl:choose>
+    </xsl:for-each>
+    <xsl:text>]</xsl:text>
+  </field>
+
+</xsl:template>
+
+</xsl:stylesheet>

+ 13 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/job_xml.xslt

@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="job_xml">
+
+  <field name="job_xml" type="CharField">
+    <xsl:value-of select="*[local-name()='job-xml']"/>
+  </field>
+
+</xsl:template>
+
+</xsl:stylesheet>

+ 14 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/main_class.xslt

@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="main_class">
+
+  <field name="main_class" type="CharField">
+    <xsl:value-of select="*[local-name()='main-class']"/>
+  </field>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 14 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/mapper.xslt

@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="mapper">
+
+  <field name="mapper" type="CharField">
+    <xsl:value-of select="*[local-name()='streaming']/*[local-name()='mapper']"/>
+  </field>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 24 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/params.xslt

@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="params">
+
+  <field name="params" type="TextField">
+    <xsl:text>[</xsl:text>
+    <xsl:for-each select="*[local-name()='param']">
+      <xsl:choose>
+        <xsl:when test="position() &lt; last()">
+          <xsl:text><![CDATA[{"type":"]]></xsl:text><xsl:value-of select="local-name()" /><xsl:text><![CDATA[","value":"]]></xsl:text><xsl:value-of select="." /><xsl:text><![CDATA["},]]></xsl:text>
+        </xsl:when>
+        <xsl:otherwise>
+          <xsl:text><![CDATA[{"type":"]]></xsl:text><xsl:value-of select="local-name()" /><xsl:text><![CDATA[","value":"]]></xsl:text><xsl:value-of select="." /><xsl:text><![CDATA["}]]></xsl:text>
+        </xsl:otherwise>
+      </xsl:choose>
+    </xsl:for-each>
+    <xsl:text>]</xsl:text>
+  </field>
+
+</xsl:template>
+
+</xsl:stylesheet>

+ 26 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/prepares.xslt

@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="prepares">
+
+  <field name="prepares" type="TextField">
+
+    <xsl:text>[</xsl:text>
+    <xsl:for-each select="*[local-name()='prepare']/*">
+      <xsl:choose>
+        <xsl:when test="position() &lt; last()">
+          <xsl:text><![CDATA[{"type":"]]></xsl:text><xsl:value-of select="local-name()" /><xsl:text><![CDATA[","value":"]]></xsl:text><xsl:value-of select="@path" /><xsl:text><![CDATA["},]]></xsl:text>
+        </xsl:when>
+        <xsl:otherwise>
+          <xsl:text><![CDATA[{"type":"]]></xsl:text><xsl:value-of select="local-name()" /><xsl:text><![CDATA[","value":"]]></xsl:text><xsl:value-of select="@path" /><xsl:text><![CDATA["}]]></xsl:text>
+        </xsl:otherwise>
+      </xsl:choose>
+    </xsl:for-each>
+    <xsl:text>]</xsl:text>
+
+  </field>
+
+</xsl:template>
+
+</xsl:stylesheet>

+ 14 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/reducer.xslt

@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="reducer">
+
+  <field name="reducer" type="CharField">
+    <xsl:value-of select="*[local-name()='streaming']/*[local-name()='reducer']"/>
+  </field>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 14 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/script_path.xslt

@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="script_path">
+
+  <field name="script_path" type="CharField">
+    <xsl:value-of select="*[local-name()='script']"/>
+  </field>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 14 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fields/user.xslt

@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template name="user">
+
+  <field name="user" type="CharField">
+    <xsl:value-of select="*[local-name()='user']"/>
+  </field>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 13 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/fork.xslt

@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template match="fork">
+
+  <object model="oozie.fork" pk="0">
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 34 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/java.xslt

@@ -0,0 +1,34 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="fields/archives.xslt"/>
+<xsl:import href="fields/args.xslt"/>
+<xsl:import href="fields/files.xslt"/>
+<xsl:import href="fields/job_xml.xslt"/>
+<xsl:import href="fields/java_opts.xslt"/>
+<xsl:import href="fields/jar_path.xslt"/>
+<xsl:import href="fields/job_properties.xslt"/>
+<xsl:import href="fields/main_class.xslt"/>
+<xsl:import href="fields/prepares.xslt"/>
+
+<xsl:template match="java">
+
+  <object model="oozie.java" pk="0">
+
+    <xsl:call-template name="archives"/>
+    <xsl:call-template name="args"/>
+    <xsl:call-template name="files"/>
+    <xsl:call-template name="jar_path"/>
+    <xsl:call-template name="java_opts"/>
+    <xsl:call-template name="job_properties"/>
+    <xsl:call-template name="job_xml"/>
+    <xsl:call-template name="main_class"/>
+    <xsl:call-template name="prepares"/>
+
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 13 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/join.xslt

@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template match="join">
+
+  <object model="oozie.join" pk="0">
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 13 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/kill.xslt

@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template match="kill">
+
+  <object model="oozie.kill" pk="0">
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 28 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/mapreduce.xslt

@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="fields/archives.xslt"/>
+<xsl:import href="fields/files.xslt"/>
+<xsl:import href="fields/jar_path.xslt"/>
+<xsl:import href="fields/job_properties.xslt"/>
+<xsl:import href="fields/job_xml.xslt"/>
+<xsl:import href="fields/prepares.xslt"/>
+
+<xsl:template match="map-reduce">
+
+  <object model="oozie.mapreduce" pk="0">
+
+    <xsl:call-template name="files"/>
+    <xsl:call-template name="archives"/>
+    <xsl:call-template name="jar_path"/>
+    <xsl:call-template name="job_properties"/>
+    <xsl:call-template name="job_xml"/>
+    <xsl:call-template name="prepares"/>
+
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 30 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/pig.xslt

@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="fields/archives.xslt"/>
+<xsl:import href="fields/arguments.xslt"/>
+<xsl:import href="fields/files.xslt"/>
+<xsl:import href="fields/job_properties.xslt"/>
+<xsl:import href="fields/job_xml.xslt"/>
+<xsl:import href="fields/prepares.xslt"/>
+<xsl:import href="fields/script_path.xslt"/>
+
+<xsl:template match="pig">
+
+  <object model="oozie.pig" pk="0">
+
+    <xsl:call-template name="archives"/>
+    <xsl:call-template name="arguments"/>
+    <xsl:call-template name="files"/>
+    <xsl:call-template name="job_properties"/>
+    <xsl:call-template name="job_xml"/>
+    <xsl:call-template name="prepares"/>
+    <xsl:call-template name="script_path"/>
+
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 14 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/start.xslt

@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:template match="start">
+
+  <object model="oozie.start" pk="0">
+
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 26 - 0
apps/oozie/src/oozie/xslt/0.4/nodes/streaming.xslt

@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:import href="fields/archives.xslt"/>
+<xsl:import href="fields/files.xslt"/>
+<xsl:import href="fields/job_properties.xslt"/>
+<xsl:import href="fields/mapper.xslt"/>
+<xsl:import href="fields/reducer.xslt"/>
+
+<xsl:template match="streaming">
+
+  <object model="oozie.streaming" pk="0">
+
+    <xsl:call-template name="archives"/>
+    <xsl:call-template name="files"/>
+    <xsl:call-template name="job_properties"/>
+    <xsl:call-template name="mapper"/>
+    <xsl:call-template name="reducer"/>
+
+  </object>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>

+ 20 - 0
apps/oozie/src/oozie/xslt/0.4/workflow.xslt

@@ -0,0 +1,20 @@
+<?xml version="1.0"?>
+
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<xsl:include href="action.xslt"/>
+<xsl:include href="control.xslt"/>
+
+<xsl:template match="/workflow-app">
+
+  <django-objects version="1.0">
+
+    <xsl:apply-templates select="action"/>
+    <xsl:apply-templates select="start | end | decision | fork | join | kill"/>
+
+  </django-objects>
+
+</xsl:template>
+
+<xsl:output method="xml" version="1.0" encoding="UTF-8" indent="yes"/>
+</xsl:stylesheet>