|
@@ -15,262 +15,327 @@
|
|
|
# See the License for the specific language governing permissions and
|
|
# See the License for the specific language governing permissions and
|
|
|
# limitations under the License.
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
|
|
+try:
|
|
|
|
|
+ import json
|
|
|
|
|
+except ImportError:
|
|
|
|
|
+ import simplejson as json
|
|
|
|
|
+import logging
|
|
|
|
|
+
|
|
|
|
|
+from nose.plugins.skip import SkipTest
|
|
|
from nose.tools import assert_true, assert_false, assert_equal, assert_not_equal
|
|
from nose.tools import assert_true, assert_false, assert_equal, assert_not_equal
|
|
|
from django.core.urlresolvers import reverse
|
|
from django.core.urlresolvers import reverse
|
|
|
|
|
|
|
|
from desktop.lib.django_test_util import make_logged_in_client
|
|
from desktop.lib.django_test_util import make_logged_in_client
|
|
|
-
|
|
|
|
|
-from oozie.models import Workflow, Node, Job, Coordinator, Fork
|
|
|
|
|
-from oozie import conf
|
|
|
|
|
from desktop.lib.test_utils import grant_access
|
|
from desktop.lib.test_utils import grant_access
|
|
|
|
|
+from liboozie import oozie_api
|
|
|
|
|
+from liboozie.types import WorkflowList, Workflow as OozieWorkflow, Coordinator as OozieCoordinator,\
|
|
|
|
|
+ CoordinatorList
|
|
|
|
|
|
|
|
|
|
+from oozie.models import Workflow, Node, Job, Coordinator, Fork
|
|
|
|
|
+from oozie.conf import SHARE_JOBS
|
|
|
|
|
|
|
|
-def test_find_paramters():
|
|
|
|
|
- jobs = [Job(name="$a"),
|
|
|
|
|
- Job(name="foo $b $$"),
|
|
|
|
|
- Job(name="${foo}", description="xxx ${foo}")]
|
|
|
|
|
-
|
|
|
|
|
- result = [job.find_parameters(['name', 'description']) for job in jobs]
|
|
|
|
|
- assert_equal(set(["a", "b", "foo"]), reduce(lambda x, y: x | set(y), result, set()))
|
|
|
|
|
-
|
|
|
|
|
-
|
|
|
|
|
-def test_create_workflow():
|
|
|
|
|
- create_workflow()
|
|
|
|
|
-
|
|
|
|
|
-
|
|
|
|
|
-def test_move_up():
|
|
|
|
|
- c = make_logged_in_client()
|
|
|
|
|
-
|
|
|
|
|
- Workflow.objects.all().delete()
|
|
|
|
|
- wf = create_workflow()
|
|
|
|
|
-
|
|
|
|
|
- # 1
|
|
|
|
|
- # 2
|
|
|
|
|
- # 3
|
|
|
|
|
- action1 = Node.objects.get(name='action-name-1')
|
|
|
|
|
- action2 = Node.objects.get(name='action-name-2')
|
|
|
|
|
- action3 = Node.objects.get(name='action-name-3')
|
|
|
|
|
-
|
|
|
|
|
- # 1 2 3
|
|
|
|
|
- move_up(c, wf, action2)
|
|
|
|
|
- move_up(c, wf, action3)
|
|
|
|
|
-
|
|
|
|
|
- # 1 2
|
|
|
|
|
- # 3
|
|
|
|
|
- move_up(c, wf, action1)
|
|
|
|
|
- move_up(c, wf, action2)
|
|
|
|
|
-
|
|
|
|
|
- # 1
|
|
|
|
|
- # 2
|
|
|
|
|
- # 3
|
|
|
|
|
- move_up(c, wf, action2)
|
|
|
|
|
-
|
|
|
|
|
- # 1 2
|
|
|
|
|
- # 3
|
|
|
|
|
- action4 = add_action(wf.id, action2.id, 'name-4')
|
|
|
|
|
- move_up(c, wf, action4)
|
|
|
|
|
-
|
|
|
|
|
- # 1 2 3 4
|
|
|
|
|
|
|
+LOG = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
|
-def test_move_down():
|
|
|
|
|
- c = make_logged_in_client()
|
|
|
|
|
|
|
+# Mock Lib Oozie
|
|
|
|
|
+oozie_api.get_oozie = lambda: MockOozieApi()
|
|
|
|
|
|
|
|
- Workflow.objects.all().delete()
|
|
|
|
|
- wf = create_workflow()
|
|
|
|
|
|
|
|
|
|
- action1 = Node.objects.get(name='action-name-1')
|
|
|
|
|
- action2 = Node.objects.get(name='action-name-2')
|
|
|
|
|
- action3 = Node.objects.get(name='action-name-3')
|
|
|
|
|
|
|
+class TestEditor:
|
|
|
|
|
|
|
|
- # 1
|
|
|
|
|
- # 2
|
|
|
|
|
- # 3
|
|
|
|
|
- move_down(c, wf, action1)
|
|
|
|
|
- move_down(c, wf, action2)
|
|
|
|
|
|
|
+ def setUp(self):
|
|
|
|
|
+ Workflow.objects.all().delete()
|
|
|
|
|
+ Coordinator.objects.all().delete()
|
|
|
|
|
|
|
|
- # 1
|
|
|
|
|
- # 2
|
|
|
|
|
- # 3
|
|
|
|
|
- move_down(c, wf, action2)
|
|
|
|
|
- move_down(c, wf, action1)
|
|
|
|
|
|
|
+ self.c = make_logged_in_client()
|
|
|
|
|
+ self.wf = create_workflow()
|
|
|
|
|
|
|
|
- # 1 2 3
|
|
|
|
|
- move_down(c, wf, action3)
|
|
|
|
|
- move_down(c, wf, action2)
|
|
|
|
|
|
|
|
|
|
- # 1
|
|
|
|
|
- # 2 3
|
|
|
|
|
- action4 = add_action(wf.id, action2.id, 'name-4')
|
|
|
|
|
|
|
+ def test_find_paramters(self):
|
|
|
|
|
+ jobs = [Job(name="$a"),
|
|
|
|
|
+ Job(name="foo $b $$"),
|
|
|
|
|
+ Job(name="${foo}", description="xxx ${foo}")]
|
|
|
|
|
|
|
|
- # 1
|
|
|
|
|
- # 2 3
|
|
|
|
|
- # 4
|
|
|
|
|
- move_down(c, wf, action4)
|
|
|
|
|
- move_down(c, wf, action3)
|
|
|
|
|
- move_down(c, wf, action4)
|
|
|
|
|
|
|
+ result = [job.find_parameters(['name', 'description']) for job in jobs]
|
|
|
|
|
+ assert_equal(set(["a", "b", "foo"]), reduce(lambda x, y: x | set(y), result, set()))
|
|
|
|
|
|
|
|
- # 1
|
|
|
|
|
- # 2
|
|
|
|
|
- # 3
|
|
|
|
|
- # 4
|
|
|
|
|
|
|
|
|
|
|
|
+ def test_create_workflow(self):
|
|
|
|
|
+ # Done in the setUp
|
|
|
|
|
+ pass
|
|
|
|
|
|
|
|
-def test_decision_node():
|
|
|
|
|
- c = make_logged_in_client()
|
|
|
|
|
|
|
|
|
|
- Workflow.objects.all().delete()
|
|
|
|
|
- wf = create_workflow()
|
|
|
|
|
-
|
|
|
|
|
- action1 = Node.objects.get(name='action-name-1')
|
|
|
|
|
- action2 = Node.objects.get(name='action-name-2')
|
|
|
|
|
-
|
|
|
|
|
- move_down(c, wf, action1)
|
|
|
|
|
- fork = action1.get_parent()
|
|
|
|
|
-
|
|
|
|
|
- # 1 2
|
|
|
|
|
- # 3
|
|
|
|
|
- reponse = c.get(reverse('oozie:edit_workflow_fork', args=[fork.id]), {}, follow=True)
|
|
|
|
|
- assert_equal(200, reponse.status_code)
|
|
|
|
|
-
|
|
|
|
|
- assert_false(fork.has_decisions())
|
|
|
|
|
-
|
|
|
|
|
- reponse = c.post(reverse('oozie:edit_workflow_fork', args=[fork.id]), {
|
|
|
|
|
- u'form-MAX_NUM_FORMS': [u'0'], u'form-TOTAL_FORMS': [u'2'], u'form-INITIAL_FORMS': [u'2'],
|
|
|
|
|
- u'form-0-comment': [u'output'], u'form-0-id': [action1.id],
|
|
|
|
|
- u'form-1-comment': [u'output'], u'form-1-id': [action2.id],
|
|
|
|
|
- u'child': [wf.end.id]}, follow=True)
|
|
|
|
|
- assert_equal(200, reponse.status_code)
|
|
|
|
|
-
|
|
|
|
|
- #assert_equal(Fork.ACTION_DECISION_TYPE, fork.node_type)
|
|
|
|
|
- #assert_true(fork.has_decisions(), reponse.content)
|
|
|
|
|
-
|
|
|
|
|
-
|
|
|
|
|
-def test_workflow_gen_xml():
|
|
|
|
|
- Workflow.objects.all().delete()
|
|
|
|
|
- wf = create_workflow()
|
|
|
|
|
-
|
|
|
|
|
- assert_equal(
|
|
|
|
|
- '<workflow-app name="wf-name-1" xmlns="uri:oozie:workflow:0.2">\n'
|
|
|
|
|
- ' <start to="action-name-1"/>\n'
|
|
|
|
|
- ' <action name="action-name-1">\n'
|
|
|
|
|
- ' <map-reduce>\n'
|
|
|
|
|
- ' <job-tracker>${jobTracker}</job-tracker>\n'
|
|
|
|
|
- ' <name-node>${nameNode}</name-node>\n'
|
|
|
|
|
- ' </map-reduce>\n'
|
|
|
|
|
- ' <ok to="action-name-2"/>\n'
|
|
|
|
|
- ' <error to="kill"/>\n'
|
|
|
|
|
- ' </action>\n'
|
|
|
|
|
- ' <action name="action-name-2">\n'
|
|
|
|
|
- ' <map-reduce>\n'
|
|
|
|
|
- ' <job-tracker>${jobTracker}</job-tracker>\n'
|
|
|
|
|
- ' <name-node>${nameNode}</name-node>\n'
|
|
|
|
|
- ' </map-reduce>\n'
|
|
|
|
|
- ' <ok to="action-name-3"/>\n'
|
|
|
|
|
- ' <error to="kill"/>\n'
|
|
|
|
|
- ' </action>\n'
|
|
|
|
|
- ' <action name="action-name-3">\n'
|
|
|
|
|
- ' <map-reduce>\n'
|
|
|
|
|
- ' <job-tracker>${jobTracker}</job-tracker>\n'
|
|
|
|
|
- ' <name-node>${nameNode}</name-node>\n'
|
|
|
|
|
- ' </map-reduce>\n'
|
|
|
|
|
- ' <ok to="end"/>\n'
|
|
|
|
|
- ' <error to="kill"/>\n'
|
|
|
|
|
- ' </action>\n'
|
|
|
|
|
- ' <kill name="kill">\n'
|
|
|
|
|
- ' <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>\n'
|
|
|
|
|
- ' </kill>\n'
|
|
|
|
|
- ' <end name="end"/>\n'
|
|
|
|
|
- '</workflow-app>'.split(), wf.to_xml().split())
|
|
|
|
|
-
|
|
|
|
|
-
|
|
|
|
|
-def test_workflow_permissions():
|
|
|
|
|
- c = make_logged_in_client()
|
|
|
|
|
|
|
+ def test_move_up(self):
|
|
|
|
|
+ action1 = Node.objects.get(name='action-name-1')
|
|
|
|
|
+ action2 = Node.objects.get(name='action-name-2')
|
|
|
|
|
+ action3 = Node.objects.get(name='action-name-3')
|
|
|
|
|
|
|
|
- Workflow.objects.all().delete()
|
|
|
|
|
- wf = create_workflow()
|
|
|
|
|
|
|
+ # 1
|
|
|
|
|
+ # 2
|
|
|
|
|
+ # 3
|
|
|
|
|
+ move_up(self.c, self.wf, action2)
|
|
|
|
|
+ move_up(self.c, self.wf, action3)
|
|
|
|
|
|
|
|
- response = c.get(reverse('oozie:edit_workflow', args=[wf.id]))
|
|
|
|
|
|
|
+ # 1 2
|
|
|
|
|
+ # 3
|
|
|
|
|
+ move_up(self.c, self.wf, action1)
|
|
|
|
|
+ move_up(self.c, self.wf, action2)
|
|
|
|
|
|
|
|
- # Login as someone else
|
|
|
|
|
- client_not_me = make_logged_in_client(username='not_me', is_superuser=False, groupname='test')
|
|
|
|
|
- grant_access("not_me", "test", "oozie")
|
|
|
|
|
|
|
+ # 1
|
|
|
|
|
+ # 2
|
|
|
|
|
+ # 3
|
|
|
|
|
+ move_up(self.c, self.wf, action2)
|
|
|
|
|
|
|
|
|
|
+ # 1 2
|
|
|
|
|
+ # 3
|
|
|
|
|
+ action4 = add_action(self.wf.id, action2.id, 'name-4')
|
|
|
|
|
+ move_up(self.c, self.wf, action4)
|
|
|
|
|
|
|
|
- # Edit
|
|
|
|
|
- finish = conf.SHARE_JOBS.set_for_testing(True)
|
|
|
|
|
- try:
|
|
|
|
|
- resp = client_not_me.get(reverse('oozie:edit_workflow', args=[wf.id]))
|
|
|
|
|
- assert_true('wf-name-1' in resp.content, resp.content)
|
|
|
|
|
- finally:
|
|
|
|
|
- finish()
|
|
|
|
|
- finish = conf.SHARE_JOBS.set_for_testing(False)
|
|
|
|
|
- try:
|
|
|
|
|
- resp = client_not_me.get(reverse('oozie:edit_workflow', args=[wf.id]))
|
|
|
|
|
- assert_false('wf-name-1' in resp.content, resp.content)
|
|
|
|
|
- finally:
|
|
|
|
|
- finish()
|
|
|
|
|
-
|
|
|
|
|
- # Share
|
|
|
|
|
- wf.is_shared = True
|
|
|
|
|
- wf.save()
|
|
|
|
|
- finish = conf.SHARE_JOBS.set_for_testing(True)
|
|
|
|
|
- try:
|
|
|
|
|
- resp = client_not_me.get(reverse('oozie:edit_workflow', args=[wf.id]))
|
|
|
|
|
- assert_true('wf-name-1' in resp.content, resp.content)
|
|
|
|
|
- finally:
|
|
|
|
|
- finish()
|
|
|
|
|
|
|
+ # 1 2 3 4
|
|
|
|
|
|
|
|
- # Delete
|
|
|
|
|
- finish = conf.SHARE_JOBS.set_for_testing(False)
|
|
|
|
|
- try:
|
|
|
|
|
- resp = client_not_me.post(reverse('oozie:delete_workflow', args=[wf.id]))
|
|
|
|
|
- assert_true('Permission denied' in resp.content, resp.content)
|
|
|
|
|
- finally:
|
|
|
|
|
- finish()
|
|
|
|
|
-
|
|
|
|
|
- response = c.post(reverse('oozie:delete_workflow', args=[wf.id]), follow=True)
|
|
|
|
|
- assert_equal(200, response.status_code)
|
|
|
|
|
|
|
|
|
|
|
|
+ def test_move_down(self):
|
|
|
|
|
+ action1 = Node.objects.get(name='action-name-1')
|
|
|
|
|
+ action2 = Node.objects.get(name='action-name-2')
|
|
|
|
|
+ action3 = Node.objects.get(name='action-name-3')
|
|
|
|
|
|
|
|
-# test multi fork
|
|
|
|
|
-# test submit wf
|
|
|
|
|
|
|
+ # 1
|
|
|
|
|
+ # 2
|
|
|
|
|
+ # 3
|
|
|
|
|
+ move_down(self.c, self.wf, action1)
|
|
|
|
|
+ move_down(self.c, self.wf, action2)
|
|
|
|
|
|
|
|
|
|
+ # 1
|
|
|
|
|
+ # 2
|
|
|
|
|
+ # 3
|
|
|
|
|
+ move_down(self.c, self.wf, action2)
|
|
|
|
|
+ move_down(self.c, self.wf, action1)
|
|
|
|
|
|
|
|
-def test_coordinator_gen_xml():
|
|
|
|
|
- Workflow.objects.all().delete()
|
|
|
|
|
- Coordinator.objects.all().delete()
|
|
|
|
|
|
|
+ # 1 2 3
|
|
|
|
|
+ move_down(self.c, self.wf, action3)
|
|
|
|
|
+ move_down(self.c, self.wf, action2)
|
|
|
|
|
|
|
|
- wf = create_workflow()
|
|
|
|
|
- coord = create_coordinator(wf)
|
|
|
|
|
-
|
|
|
|
|
- assert_equal(
|
|
|
|
|
- '<coordinator-app name="MyCoord"\n'
|
|
|
|
|
- ' frequency="${coord:days(1)}"\n'
|
|
|
|
|
- ' start="2012-07-01T00:00Z" end="2012-07-04T00:00Z" timezone="America/Los_Angeles"\n'
|
|
|
|
|
- ' xmlns="uri:oozie:coordinator:0.1">\n'
|
|
|
|
|
- ' <!--\n'
|
|
|
|
|
- ' <controls>\n'
|
|
|
|
|
- ' <timeout>[TIME_PERIOD]</timeout>\n'
|
|
|
|
|
- ' <concurrency>[CONCURRENCY]</concurrency>\n'
|
|
|
|
|
- ' <execution>[EXECUTION_STRATEGY]</execution>\n'
|
|
|
|
|
- ' </controls>\n'
|
|
|
|
|
- ' -->\n'
|
|
|
|
|
- ' <action>\n'
|
|
|
|
|
- ' <workflow>\n'
|
|
|
|
|
- ' <app-path>${wf_application_path}</app-path>\n'
|
|
|
|
|
- ' <configuration>\n'
|
|
|
|
|
- ' </configuration>\n'
|
|
|
|
|
- ' </workflow>\n'
|
|
|
|
|
- ' </action>\n'
|
|
|
|
|
- '</coordinator-app>\n'.split(), coord.to_xml().split())
|
|
|
|
|
|
|
+ # 1
|
|
|
|
|
+ # 2 3
|
|
|
|
|
+ action4 = add_action(self.wf.id, action2.id, 'name-4')
|
|
|
|
|
|
|
|
|
|
+ # 1
|
|
|
|
|
+ # 2 3
|
|
|
|
|
+ # 4
|
|
|
|
|
+ move_down(self.c, self.wf, action4)
|
|
|
|
|
+ move_down(self.c, self.wf, action3)
|
|
|
|
|
+ move_down(self.c, self.wf, action4)
|
|
|
|
|
|
|
|
|
|
+ # 1
|
|
|
|
|
+ # 2
|
|
|
|
|
+ # 3
|
|
|
|
|
+ # 4
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_clone_workflow(self):
|
|
|
|
|
+ workflow_count = Workflow.objects.count()
|
|
|
|
|
+
|
|
|
|
|
+ response = self.c.post(reverse('oozie:clone_workflow', args=[self.wf.id]), {}, follow=True)
|
|
|
|
|
+
|
|
|
|
|
+ assert_equal(workflow_count + 1, Workflow.objects.count(), response)
|
|
|
|
|
+ wf2 = Workflow.objects.latest('id')
|
|
|
|
|
+ assert_equal(self.wf.node_set.count(), wf2.node_set.count())
|
|
|
|
|
+
|
|
|
|
|
+ assert_not_equal(self.wf.id, wf2.id)
|
|
|
|
|
+ node_ids = set(self.wf.node_set.values_list('id', flat=True))
|
|
|
|
|
+ for node in wf2.node_set.all():
|
|
|
|
|
+ assert_false(node.id in node_ids)
|
|
|
|
|
+
|
|
|
|
|
+ raise SkipTest
|
|
|
|
|
+ # To Fix
|
|
|
|
|
+ assert_not_equal(self.wf.deployment_dir, wf2.deployment_dir)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_clone_node(self):
|
|
|
|
|
+ action1 = Node.objects.get(name='action-name-1')
|
|
|
|
|
+
|
|
|
|
|
+ node_count = self.wf.actions.count()
|
|
|
|
|
+ assert_true(1, len(action1.get_children()))
|
|
|
|
|
+
|
|
|
|
|
+ response = self.c.get(reverse('oozie:clone_action', args=[action1.id]), {}, follow=True)
|
|
|
|
|
+
|
|
|
|
|
+ assert_equal(200, response.status_code)
|
|
|
|
|
+ assert_not_equal(action1.id, action1.get_children()[1].id)
|
|
|
|
|
+ assert_true(2, len(action1.get_children()))
|
|
|
|
|
+ assert_equal(node_count + 1, self.wf.actions.count())
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_decision_node(self):
|
|
|
|
|
+ action1 = Node.objects.get(name='action-name-1')
|
|
|
|
|
+ action2 = Node.objects.get(name='action-name-2')
|
|
|
|
|
+ action3 = Node.objects.get(name='action-name-3')
|
|
|
|
|
+
|
|
|
|
|
+ move_down(self.c, self.wf, action1)
|
|
|
|
|
+ fork = action1.get_parent()
|
|
|
|
|
+
|
|
|
|
|
+ # 1 2
|
|
|
|
|
+ # 3
|
|
|
|
|
+ response = self.c.get(reverse('oozie:edit_workflow_fork', args=[fork.id]), {}, follow=True)
|
|
|
|
|
+ assert_equal(200, response.status_code)
|
|
|
|
|
+
|
|
|
|
|
+ assert_false(fork.has_decisions())
|
|
|
|
|
+
|
|
|
|
|
+ # Missing information for converting to decision
|
|
|
|
|
+ response = self.c.post(reverse('oozie:edit_workflow_fork', args=[fork.id]), {
|
|
|
|
|
+ u'form-MAX_NUM_FORMS': [u'0'], u'form-TOTAL_FORMS': [u'2'], u'form-INITIAL_FORMS': [u'2'],
|
|
|
|
|
+ u'form-0-comment': [u''], u'form-0-id': [u'%s' % action1.id],
|
|
|
|
|
+ u'form-1-comment': [u''], u'form-1-id': [u'%s' % action2.id],
|
|
|
|
|
+ u'child': [u'%s' % self.wf.end.id]}, follow=True)
|
|
|
|
|
+ assert_true('This field is required' in response.content, response.content)
|
|
|
|
|
+ assert_equal(200, response.status_code)
|
|
|
|
|
+ assert_false(fork.has_decisions())
|
|
|
|
|
+
|
|
|
|
|
+ # Convert to decision
|
|
|
|
|
+ response = self.c.post(reverse('oozie:edit_workflow_fork', args=[fork.id]), {
|
|
|
|
|
+ u'form-MAX_NUM_FORMS': [u'0'], u'form-TOTAL_FORMS': [u'2'], u'form-INITIAL_FORMS': [u'2'],
|
|
|
|
|
+ u'form-0-comment': [u'output'], u'form-0-id': [u'%s' % action1.id],
|
|
|
|
|
+ u'form-1-comment': [u'output'], u'form-1-id': [u'%s' % action2.id],
|
|
|
|
|
+ u'child': [u'%s' % self.wf.end.id]}, follow=True)
|
|
|
|
|
+ assert_equal(200, response.status_code)
|
|
|
|
|
+
|
|
|
|
|
+ raise SkipTest
|
|
|
|
|
+ # Mystery below, link_formset.save() does not appear to save the links during a test
|
|
|
|
|
+ assert_equal(Fork.ACTION_DECISION_TYPE, fork.node_type)
|
|
|
|
|
+ assert_true(fork.has_decisions(), response.content)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_workflow_gen_xml(self):
|
|
|
|
|
+ assert_equal(
|
|
|
|
|
+ '<workflow-app name="wf-name-1" xmlns="uri:oozie:workflow:0.2">\n'
|
|
|
|
|
+ ' <start to="action-name-1"/>\n'
|
|
|
|
|
+ ' <action name="action-name-1">\n'
|
|
|
|
|
+ ' <map-reduce>\n'
|
|
|
|
|
+ ' <job-tracker>${jobTracker}</job-tracker>\n'
|
|
|
|
|
+ ' <name-node>${nameNode}</name-node>\n'
|
|
|
|
|
+ ' </map-reduce>\n'
|
|
|
|
|
+ ' <ok to="action-name-2"/>\n'
|
|
|
|
|
+ ' <error to="kill"/>\n'
|
|
|
|
|
+ ' </action>\n'
|
|
|
|
|
+ ' <action name="action-name-2">\n'
|
|
|
|
|
+ ' <map-reduce>\n'
|
|
|
|
|
+ ' <job-tracker>${jobTracker}</job-tracker>\n'
|
|
|
|
|
+ ' <name-node>${nameNode}</name-node>\n'
|
|
|
|
|
+ ' </map-reduce>\n'
|
|
|
|
|
+ ' <ok to="action-name-3"/>\n'
|
|
|
|
|
+ ' <error to="kill"/>\n'
|
|
|
|
|
+ ' </action>\n'
|
|
|
|
|
+ ' <action name="action-name-3">\n'
|
|
|
|
|
+ ' <map-reduce>\n'
|
|
|
|
|
+ ' <job-tracker>${jobTracker}</job-tracker>\n'
|
|
|
|
|
+ ' <name-node>${nameNode}</name-node>\n'
|
|
|
|
|
+ ' </map-reduce>\n'
|
|
|
|
|
+ ' <ok to="end"/>\n'
|
|
|
|
|
+ ' <error to="kill"/>\n'
|
|
|
|
|
+ ' </action>\n'
|
|
|
|
|
+ ' <kill name="kill">\n'
|
|
|
|
|
+ ' <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>\n'
|
|
|
|
|
+ ' </kill>\n'
|
|
|
|
|
+ ' <end name="end"/>\n'
|
|
|
|
|
+ '</workflow-app>'.split(), self.wf.to_xml().split())
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_workflow_permissions(self):
|
|
|
|
|
+ response = self.c.get(reverse('oozie:edit_workflow', args=[self.wf.id]))
|
|
|
|
|
+ assert_equal(200, response.status_code)
|
|
|
|
|
+
|
|
|
|
|
+ # Login as someone else
|
|
|
|
|
+ client_not_me = make_logged_in_client(username='not_me', is_superuser=False, groupname='test')
|
|
|
|
|
+ grant_access("not_me", "test", "oozie")
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ # Edit
|
|
|
|
|
+ finish = SHARE_JOBS.set_for_testing(True)
|
|
|
|
|
+ try:
|
|
|
|
|
+ response = client_not_me.get(reverse('oozie:edit_workflow', args=[self.wf.id]))
|
|
|
|
|
+ assert_equal(200, response.status_code)
|
|
|
|
|
+ assert_true('wf-name-1' in response.content, response.content)
|
|
|
|
|
+ finally:
|
|
|
|
|
+ finish()
|
|
|
|
|
+ finish = SHARE_JOBS.set_for_testing(False)
|
|
|
|
|
+ try:
|
|
|
|
|
+ response = client_not_me.get(reverse('oozie:edit_workflow', args=[self.wf.id]))
|
|
|
|
|
+ assert_equal(200, response.status_code)
|
|
|
|
|
+ assert_false('wf-name-1' in response.content, response.content)
|
|
|
|
|
+ finally:
|
|
|
|
|
+ finish()
|
|
|
|
|
+
|
|
|
|
|
+ # Share
|
|
|
|
|
+ self.wf.is_shared = True
|
|
|
|
|
+ self.wf.save()
|
|
|
|
|
+ finish = SHARE_JOBS.set_for_testing(True)
|
|
|
|
|
+ try:
|
|
|
|
|
+ response = client_not_me.get(reverse('oozie:edit_workflow', args=[self.wf.id]))
|
|
|
|
|
+ assert_equal(200, response.status_code)
|
|
|
|
|
+ assert_true('wf-name-1' in response.content, response.content)
|
|
|
|
|
+ finally:
|
|
|
|
|
+ finish()
|
|
|
|
|
+
|
|
|
|
|
+ # Delete
|
|
|
|
|
+ finish = SHARE_JOBS.set_for_testing(False)
|
|
|
|
|
+ try:
|
|
|
|
|
+ response = client_not_me.post(reverse('oozie:delete_workflow', args=[self.wf.id]))
|
|
|
|
|
+ assert_equal(200, response.status_code)
|
|
|
|
|
+ assert_true('Permission denied' in response.content, response.content)
|
|
|
|
|
+ finally:
|
|
|
|
|
+ finish()
|
|
|
|
|
+
|
|
|
|
|
+ response = self.c.post(reverse('oozie:delete_workflow', args=[self.wf.id]), follow=True)
|
|
|
|
|
+ assert_equal(200, response.status_code)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_coordinator_gen_xml(self):
|
|
|
|
|
+ coord = create_coordinator(self.wf)
|
|
|
|
|
+
|
|
|
|
|
+ assert_equal(
|
|
|
|
|
+ '<coordinator-app name="MyCoord"\n'
|
|
|
|
|
+ ' frequency="${coord:days(1)}"\n'
|
|
|
|
|
+ ' start="2012-07-01T00:00Z" end="2012-07-04T00:00Z" timezone="America/Los_Angeles"\n'
|
|
|
|
|
+ ' xmlns="uri:oozie:coordinator:0.1">\n'
|
|
|
|
|
+ ' <!--\n'
|
|
|
|
|
+ ' <controls>\n'
|
|
|
|
|
+ ' <timeout>[TIME_PERIOD]</timeout>\n'
|
|
|
|
|
+ ' <concurrency>[CONCURRENCY]</concurrency>\n'
|
|
|
|
|
+ ' <execution>[EXECUTION_STRATEGY]</execution>\n'
|
|
|
|
|
+ ' </controls>\n'
|
|
|
|
|
+ ' -->\n'
|
|
|
|
|
+ ' <action>\n'
|
|
|
|
|
+ ' <workflow>\n'
|
|
|
|
|
+ ' <app-path>${wf_application_path}</app-path>\n'
|
|
|
|
|
+ ' <configuration>\n'
|
|
|
|
|
+ ' </configuration>\n'
|
|
|
|
|
+ ' </workflow>\n'
|
|
|
|
|
+ ' </action>\n'
|
|
|
|
|
+ '</coordinator-app>\n'.split(), coord.to_xml().split())
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_create_coordinator_dataset(self):
|
|
|
|
|
+ coord = create_coordinator(self.wf)
|
|
|
|
|
+ create_dataset(coord)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_create_coordinator_input_data(self):
|
|
|
|
|
+ coord = create_coordinator(self.wf)
|
|
|
|
|
+ create_dataset(coord)
|
|
|
|
|
+
|
|
|
|
|
+ response = self.c.post(reverse('oozie:create_coordinator_data', args=[coord.id, 'input']),
|
|
|
|
|
+ {u'name': [u'input_dir'], u'dataset': [u'1']})
|
|
|
|
|
+ data = json.loads(response.content)
|
|
|
|
|
+ assert_equal(0, data['status'], data['data'])
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+# Beware: client not consistent with TestEditor.c
|
|
|
def add_action(workflow, action, name):
|
|
def add_action(workflow, action, name):
|
|
|
c = make_logged_in_client()
|
|
c = make_logged_in_client()
|
|
|
|
|
|
|
|
response = c.post("/oozie/new_action/%s/%s/%s" % (workflow, 'mapreduce', action), {
|
|
response = c.post("/oozie/new_action/%s/%s/%s" % (workflow, 'mapreduce', action), {
|
|
|
- u'files': [u'[]'], u'name': [name], u'jar_path': [u'/tmp/.file.jar'], u'job_properties': [u'[]'], u'archives': [u'[]'], u'description': [u'']})
|
|
|
|
|
|
|
+ u'files': [u'[]'], u'name': [name], u'jar_path': [u'/tmp/.file.jar'], u'job_properties': [u'[]'], u'archives': [u'[]'], u'description': [u'']}, follow=True)
|
|
|
|
|
+ assert_equal(200, response.status_code)
|
|
|
assert_true(Node.objects.filter(name=name).exists(), response)
|
|
assert_true(Node.objects.filter(name=name).exists(), response)
|
|
|
return Node.objects.get(name=name)
|
|
return Node.objects.get(name=name)
|
|
|
|
|
|
|
@@ -282,7 +347,8 @@ def create_workflow():
|
|
|
response = c.get(reverse('oozie:create_workflow'))
|
|
response = c.get(reverse('oozie:create_workflow'))
|
|
|
assert_equal(workflow_count, Workflow.objects.count(), response)
|
|
assert_equal(workflow_count, Workflow.objects.count(), response)
|
|
|
|
|
|
|
|
- response = c.post(reverse('oozie:create_workflow'), {u'deployment_dir': [u''], u'name': [u'wf-name-1'], u'description': [u'']})
|
|
|
|
|
|
|
+ response = c.post(reverse('oozie:create_workflow'), {u'deployment_dir': [u''], u'name': [u'wf-name-1'], u'description': [u'']}, follow=True)
|
|
|
|
|
+ assert_equal(200, response.status_code)
|
|
|
assert_equal(workflow_count + 1, Workflow.objects.count(), response)
|
|
assert_equal(workflow_count + 1, Workflow.objects.count(), response)
|
|
|
|
|
|
|
|
wf = Workflow.objects.get()
|
|
wf = Workflow.objects.get()
|
|
@@ -308,10 +374,22 @@ def create_coordinator(workflow):
|
|
|
return Coordinator.objects.get()
|
|
return Coordinator.objects.get()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
+def create_dataset(coord):
|
|
|
|
|
+ c = make_logged_in_client()
|
|
|
|
|
+
|
|
|
|
|
+ response = c.post(reverse('oozie:create_coordinator_dataset', args=[coord.id]),
|
|
|
|
|
+ {u'name': [u'MyDataset'], u'frequency_number': [u'1'], u'frequency_unit': [u'days'],
|
|
|
|
|
+ u'uri': [u'/data/${YEAR}${MONTH}${DAY}'], u'start': [u'2012-08-15'],
|
|
|
|
|
+ u'timezone': [u'America/Los_Angeles'], u'done_flag': [u''],
|
|
|
|
|
+ u'description': [u'']})
|
|
|
|
|
+ data = json.loads(response.content)
|
|
|
|
|
+ assert_equal(0, data['status'], data['data'])
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
def move(c, wf, direction, action):
|
|
def move(c, wf, direction, action):
|
|
|
try:
|
|
try:
|
|
|
- print wf.get_hierarchy()
|
|
|
|
|
- print direction, action
|
|
|
|
|
|
|
+ LOG.info(wf.get_hierarchy())
|
|
|
|
|
+ LOG.info('%s %s' % (direction, action))
|
|
|
assert_equal(200, c.post(reverse(direction, args=[action.id]), {}, follow=True).status_code)
|
|
assert_equal(200, c.post(reverse(direction, args=[action.id]), {}, follow=True).status_code)
|
|
|
except:
|
|
except:
|
|
|
raise
|
|
raise
|
|
@@ -324,3 +402,92 @@ def move_up(c, wf, action):
|
|
|
def move_down(c, wf, action):
|
|
def move_down(c, wf, action):
|
|
|
move(c, wf, 'oozie:move_down_action', action)
|
|
move(c, wf, 'oozie:move_down_action', action)
|
|
|
|
|
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+class TestDashboard:
|
|
|
|
|
+
|
|
|
|
|
+ def setUp(self):
|
|
|
|
|
+ Workflow.objects.all().delete()
|
|
|
|
|
+ Coordinator.objects.all().delete()
|
|
|
|
|
+
|
|
|
|
|
+ self.c = make_logged_in_client()
|
|
|
|
|
+ self.wf = create_workflow()
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_list_workflows(self):
|
|
|
|
|
+ response = self.c.get(reverse('oozie:list_oozie_workflows'))
|
|
|
|
|
+ for wf_id in MockOozieApi.WORKFLOW_IDS:
|
|
|
|
|
+ assert_true(wf_id in response.content, response.content)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_list_coordinators(self):
|
|
|
|
|
+ response = self.c.get(reverse('oozie:list_oozie_coordinators'))
|
|
|
|
|
+ for coord_id in MockOozieApi.COORDINATOR_IDS:
|
|
|
|
|
+ assert_true(coord_id in response.content, response.content)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_list_workflow(self):
|
|
|
|
|
+ response = self.c.get(reverse('oozie:list_oozie_workflow', args=[MockOozieApi.WORKFLOW_IDS[0]]))
|
|
|
|
|
+ assert_true('Workflow WordCount1' in response.content, response.content)
|
|
|
|
|
+ assert_true('Workflow' in response.content, response.content)
|
|
|
|
|
+
|
|
|
|
|
+ response = self.c.get(reverse('oozie:list_oozie_workflow', args=[MockOozieApi.WORKFLOW_IDS[0], MockOozieApi.COORDINATOR_IDS[0]]))
|
|
|
|
|
+ assert_true('Workflow WordCount1' in response.content, response.content)
|
|
|
|
|
+ assert_true('Workflow' in response.content, response.content)
|
|
|
|
|
+ assert_true('DailyWordCount1' in response.content, response.content)
|
|
|
|
|
+ assert_true('Coordinator' in response.content, response.content)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_list_coordinator(self):
|
|
|
|
|
+ response = self.c.get(reverse('oozie:list_oozie_coordinator', args=[MockOozieApi.COORDINATOR_IDS[0]]))
|
|
|
|
|
+ assert_true('Coordinator DailyWordCount1' in response.content, response.content)
|
|
|
|
|
+ assert_true('Workflow' in response.content, response.content)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def test_manage_oozie_jobs(self):
|
|
|
|
|
+ try:
|
|
|
|
|
+ self.c.get(reverse('oozie:manage_oozie_jobs', args=[MockOozieApi.COORDINATOR_IDS[0], 'kill']))
|
|
|
|
|
+ assert False
|
|
|
|
|
+ except:
|
|
|
|
|
+ pass
|
|
|
|
|
+
|
|
|
|
|
+ response = self.c.post(reverse('oozie:manage_oozie_jobs', args=[MockOozieApi.COORDINATOR_IDS[0], 'kill']))
|
|
|
|
|
+ data = json.loads(response.content)
|
|
|
|
|
+ assert_equal(0, data['status'])
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+class MockOozieApi:
|
|
|
|
|
+ JSON_WORKFLOW_LIST = [{u'status': u'RUNNING', u'run': 0, u'startTime': u'Mon, 30 Jul 2012 22:35:48 GMT', u'appName': u'WordCount1', u'lastModTime': u'Mon, 30 Jul 2012 22:37:00 GMT', u'actions': [], u'acl': None, u'appPath': None, u'externalId': None, u'consoleUrl': u'http://runreal:11000/oozie?job=0000012-120725142744176-oozie-oozi-W', u'conf': None, u'parentId': None, u'createdTime': u'Mon, 30 Jul 2012 22:35:48 GMT', u'toString': u'Workflow id[0000012-120725142744176-oozie-oozi-W] status[SUCCEEDED]', u'endTime': u'Mon, 30 Jul 2012 22:37:00 GMT', u'id': u'0000012-120725142744176-oozie-oozi-W', u'group': None, u'user': u'romain'},
|
|
|
|
|
+ {u'status': u'KILLED', u'run': 0, u'startTime': u'Mon, 30 Jul 2012 22:31:08 GMT', u'appName': u'WordCount2', u'lastModTime': u'Mon, 30 Jul 2012 22:32:20 GMT', u'actions': [], u'acl': None, u'appPath': None, u'externalId': None, u'consoleUrl': u'http://runreal:11000/oozie?job=0000011-120725142744176-oozie-oozi-W', u'conf': None, u'parentId': None, u'createdTime': u'Mon, 30 Jul 2012 22:31:08 GMT', u'toString': u'Workflow id[0000011-120725142744176-oozie-oozi-W] status[SUCCEEDED]', u'endTime': u'Mon, 30 Jul 2012 22:32:20 GMT', u'id': u'0000011-120725142744176-oozie-oozi-W', u'group': None, u'user': u'romain'},
|
|
|
|
|
+ {u'status': u'SUCCEEDED', u'run': 0, u'startTime': u'Mon, 30 Jul 2012 22:20:48 GMT', u'appName': u'WordCount3', u'lastModTime': u'Mon, 30 Jul 2012 22:22:00 GMT', u'actions': [], u'acl': None, u'appPath': None, u'externalId': None, u'consoleUrl': u'http://runreal:11000/oozie?job=0000009-120725142744176-oozie-oozi-W', u'conf': None, u'parentId': None, u'createdTime': u'Mon, 30 Jul 2012 22:20:48 GMT', u'toString': u'Workflow id[0000009-120725142744176-oozie-oozi-W] status[SUCCEEDED]', u'endTime': u'Mon, 30 Jul 2012 22:22:00 GMT', u'id': u'0000009-120725142744176-oozie-oozi-W', u'group': None, u'user': u'romain'},
|
|
|
|
|
+ {u'status': u'SUCCEEDED', u'run': 0, u'startTime': u'Mon, 30 Jul 2012 22:16:58 GMT', u'appName': u'WordCount4', u'lastModTime': u'Mon, 30 Jul 2012 22:18:10 GMT', u'actions': [], u'acl': None, u'appPath': None, u'externalId': None, u'consoleUrl': u'http://runreal:11000/oozie?job=0000008-120725142744176-oozie-oozi-W', u'conf': None, u'parentId': None, u'createdTime': u'Mon, 30 Jul 2012 22:16:58 GMT', u'toString': u'Workflow id[0000008-120725142744176-oozie-oozi-W] status[SUCCEEDED]', u'endTime': u'Mon, 30 Jul 2012 22:18:10 GMT', u'id': u'0000008-120725142744176-oozie-oozi-W', u'group': None, u'user': u'romain'}]
|
|
|
|
|
+ WORKFLOW_IDS = [wf['id'] for wf in JSON_WORKFLOW_LIST]
|
|
|
|
|
+
|
|
|
|
|
+ JSON_COORDINATOR_LIST = [{u'startTime': u'Sun, 01 Jul 2012 00:00:00 GMT', u'actions': [], u'frequency': 1, u'concurrency': 1, u'pauseTime': None, u'group': None, u'toString': u'Coornidator application id[0000041-120717205528122-oozie-oozi-C] status[DONEWITHERROR]', u'consoleUrl': None, u'mat_throttling': 0, u'status': u'DONEWITHERROR', u'conf': None, u'user': u'romain', u'timeOut': 120, u'coordJobPath': u'hdfs://localhost:8020/user/romain/demo2', u'timeUnit': u'DAY', u'coordJobId': u'0000041-120717205528122-oozie-oozi-C', u'coordJobName': u'DailyWordCount1', u'nextMaterializedTime': u'Wed, 04 Jul 2012 00:00:00 GMT', u'coordExternalId': None, u'acl': None, u'lastAction': u'Wed, 04 Jul 2012 00:00:00 GMT', u'executionPolicy': u'FIFO', u'timeZone': u'America/Los_Angeles', u'endTime': u'Wed, 04 Jul 2012 00:00:00 GMT'},
|
|
|
|
|
+ {u'startTime': u'Sun, 01 Jul 2012 00:00:00 GMT', u'actions': [], u'frequency': 1, u'concurrency': 1, u'pauseTime': None, u'group': None, u'toString': u'Coornidator application id[0000011-120706144403213-oozie-oozi-C] status[DONEWITHERROR]', u'consoleUrl': None, u'mat_throttling': 0, u'status': u'DONEWITHERROR', u'conf': None, u'user': u'romain', u'timeOut': 120, u'coordJobPath': u'hdfs://localhost:8020/user/hue/jobsub/_romain_-design-2', u'timeUnit': u'DAY', u'coordJobId': u'0000011-120706144403213-oozie-oozi-C', u'coordJobName': u'DailyWordCount2', u'nextMaterializedTime': u'Thu, 05 Jul 2012 00:00:00 GMT', u'coordExternalId': None, u'acl': None, u'lastAction': u'Thu, 05 Jul 2012 00:00:00 GMT', u'executionPolicy': u'FIFO', u'timeZone': u'America/Los_Angeles', u'endTime': u'Wed, 04 Jul 2012 18:54:00 GMT'},
|
|
|
|
|
+ {u'startTime': u'Sun, 01 Jul 2012 00:00:00 GMT', u'actions': [], u'frequency': 1, u'concurrency': 1, u'pauseTime': None, u'group': None, u'toString': u'Coornidator application id[0000010-120706144403213-oozie-oozi-C] status[DONEWITHERROR]', u'consoleUrl': None, u'mat_throttling': 0, u'status': u'DONEWITHERROR', u'conf': None, u'user': u'romain', u'timeOut': 120, u'coordJobPath': u'hdfs://localhost:8020/user/hue/jobsub/_romain_-design-2', u'timeUnit': u'DAY', u'coordJobId': u'0000010-120706144403213-oozie-oozi-C', u'coordJobName': u'DailyWordCount3', u'nextMaterializedTime': u'Thu, 05 Jul 2012 00:00:00 GMT', u'coordExternalId': None, u'acl': None, u'lastAction': u'Thu, 05 Jul 2012 00:00:00 GMT', u'executionPolicy': u'FIFO', u'timeZone': u'America/Los_Angeles', u'endTime': u'Wed, 04 Jul 2012 18:54:00 GMT'},
|
|
|
|
|
+ {u'startTime': u'Sun, 01 Jul 2012 00:00:00 GMT', u'actions': [], u'frequency': 1, u'concurrency': 1, u'pauseTime': None, u'group': None, u'toString': u'Coornidator application id[0000009-120706144403213-oozie-oozi-C] status[DONEWITHERROR]', u'consoleUrl': None, u'mat_throttling': 0, u'status': u'DONEWITHERROR', u'conf': None, u'user': u'romain', u'timeOut': 120, u'coordJobPath': u'hdfs://localhost:8020/user/hue/jobsub/_romain_-design-2', u'timeUnit': u'DAY', u'coordJobId': u'0000009-120706144403213-oozie-oozi-C', u'coordJobName': u'DailyWordCount4', u'nextMaterializedTime': u'Thu, 05 Jul 2012 00:00:00 GMT', u'coordExternalId': None, u'acl': None, u'lastAction': u'Thu, 05 Jul 2012 00:00:00 GMT', u'executionPolicy': u'FIFO', u'timeZone': u'America/Los_Angeles', u'endTime': u'Wed, 04 Jul 2012 18:54:00 GMT'}]
|
|
|
|
|
+ COORDINATOR_IDS = [coord['coordJobId'] for coord in JSON_COORDINATOR_LIST]
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+ def get_workflows(self, **kwargs):
|
|
|
|
|
+ return WorkflowList(self, {'offset': 0, 'total': 4, 'workflows': MockOozieApi.JSON_WORKFLOW_LIST})
|
|
|
|
|
+
|
|
|
|
|
+ def get_coordinators(self, **kwargs):
|
|
|
|
|
+ return CoordinatorList(self, {'offset': 0, 'total': 5, 'coordinatorjobs': MockOozieApi.JSON_COORDINATOR_LIST})
|
|
|
|
|
+
|
|
|
|
|
+ def get_job(self, job_id):
|
|
|
|
|
+ return OozieWorkflow(self, MockOozieApi.JSON_WORKFLOW_LIST[0])
|
|
|
|
|
+
|
|
|
|
|
+ def get_coordinator(self, job_id):
|
|
|
|
|
+ return OozieCoordinator(self, MockOozieApi.JSON_COORDINATOR_LIST[0])
|
|
|
|
|
+
|
|
|
|
|
+ def job_control(self, job_id, action):
|
|
|
|
|
+ return 'Done'
|
|
|
|
|
+
|
|
|
|
|
+ def get_job_definition(self, jobid):
|
|
|
|
|
+ return '<xml></xml>'
|
|
|
|
|
+
|
|
|
|
|
+ def get_job_log(self, jobid):
|
|
|
|
|
+ return '<xml></xml>'
|
|
|
|
|
+
|