
[oozie] Remove leaked workflows from jobsub in tests

Romain Rigaux 12 years ago
parent
commit
e33c265

+ 2 - 2
apps/jobsub/src/jobsub/tests.py

@@ -35,8 +35,8 @@ class TestJobsubWithHadoop(OozieServerProvider):
 
   def setUp(self):
     OozieServerProvider.setup_class()
-    self.cluster.fs.do_as_user('test', self.cluster.fs.create_home_dir, '/user/jobsub_test')
-    self.cluster.fs.do_as_superuser(self.cluster.fs.chmod, '/user/jobsub_test', 0777, True)
+    self.cluster.fs.do_as_user('jobsub_test', self.cluster.fs.create_home_dir, '/user/jobsub_test')
+    self.cluster.fs.do_as_superuser(self.cluster.fs.chmod, '/user/jobsub_test', 0777, True) # Hum?
     self.client = make_logged_in_client(username='jobsub_test')
     self.user = User.objects.get(username='jobsub_test')
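
Note: the fix here is to create /user/jobsub_test as the jobsub_test user itself, so the home directory ends up owned by the account the test actually logs in as, rather than by 'test'. A minimal sketch of the same idea as a reusable helper (hypothetical, not part of this commit; it only combines the fs calls visible above):

    def ensure_test_home(cluster, username):
      home = '/user/%s' % username
      # Run create_home_dir as the user so the directory is owned by that user.
      cluster.fs.do_as_user(username, cluster.fs.create_home_dir, home)
      # Open the directory up recursively so other test services can write under it.
      cluster.fs.do_as_superuser(cluster.fs.chmod, home, 0777, True)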
 

+ 5 - 1
apps/oozie/src/oozie/models.py

@@ -192,7 +192,11 @@ class Job(models.Model):
     return  [{'name': name, 'value': value} for name, value in params.iteritems()]
 
   def can_read(self, user):
-    return self.doc.get().can_read(user)
+    try:
+      return self.doc.get().can_read(user)
+    except Exception, e:
+      LOG.error('can_read failed because the object has more than one document: %s' % self.doc.all())
+      raise e
 
   def is_editable(self, user):
     """Only owners or admins can modify a job."""

+ 12 - 8
apps/oozie/src/oozie/tests.py

@@ -21,6 +21,8 @@ import logging
 import re
 import os
 
+from itertools import chain
+
 from nose.plugins.skip import SkipTest
 from nose.tools import raises, assert_true, assert_false, assert_equal, assert_not_equal
 from django.contrib.auth.models import User
@@ -3002,15 +3004,17 @@ def add_node(workflow, name, node_type, parents, attrs={}):
 def create_workflow(client, user, workflow_dict=WORKFLOW_DICT):
   name = str(workflow_dict['name'][0])
 
-  # Leaking here for some reason 
-  Document.objects.filter(name='mapreduce1', owner__username='jobsub_test').delete()
+  # If not infinite looping
+  Node.objects.filter(workflow__name=name).delete()
 
-  if Document.objects.get_docs(user, Workflow).filter(name=name, extra='').exists():
-    for doc in Document.objects.get_docs(user, Workflow).filter(name=name, extra=''):
-      if doc.content_object:
-        client.post(reverse('oozie:delete_workflow') + '?skip_trash=true', {'job_selection': [doc.content_object.id]}, follow=True)
-      else:
-        doc.delete()
+  # Leaking here for some reason
+  for doc in list(chain(Document.objects.get_docs(user, Workflow).filter(name=name, extra=''),
+                        Document.objects.filter(name='mapreduce1', owner__username='jobsub_test').all(),
+                        Document.objects.filter(name='sleep_job-copy', owner__username='jobsub_test').all())):
+    if doc.content_object:
+      client.post(reverse('oozie:delete_workflow') + '?skip_trash=true', {'job_selection': [doc.content_object.id]}, follow=True)
+    else:
+      doc.delete()
 
   workflow_count = Document.objects.available_docs(Workflow, user).count()
   response = client.get(reverse('oozie:create_workflow'))
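
Note: the rewritten cleanup walks every matching document in one pass, including the ones leaked by the jobsub tests, deleting through the view when the backing workflow still exists and dropping the orphaned Document row otherwise. A sketch of that loop factored into a helper (hypothetical name, same calls as in the hunk above):

    def purge_workflow_docs(client, docs):
      for doc in list(docs):
        if doc.content_object:
          # Delete through the view so the backing workflow is cleaned up as well.
          client.post(reverse('oozie:delete_workflow') + '?skip_trash=true',
                      {'job_selection': [doc.content_object.id]}, follow=True)
        else:
          # Orphaned Document with no backing object: remove the row itself.
          doc.delete()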

+ 3 - 0
desktop/libs/hadoop/src/hadoop/pseudo_hdfs4.py

@@ -252,6 +252,9 @@ class PseudoHdfs4(object):
     self.fs.do_as_superuser(self.fs.mkdir, '/var/log/hadoop-yarn/apps', 01777)
     self.fs.do_as_superuser(self.fs.chmod, '/var/log/hadoop-yarn/apps', 01777)
 
+    self.fs.create_home_dir('/user/test')
+    self.fs.create_home_dir('/user/hue')
+
 
   def _start_mr2(self, env):
     LOG.info("Starting MR2")