
HUE-1212 [fb] Incorrect trash used

Cache home directory on a per-user basis.
Abraham Elmahrek 12 years ago
parent
commit b9a41d0bce
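Background: before this commit, WebHdfs cached a single trash path in a thread-local slot. Under eventlet, green threads are re-used across requests, so a path computed for one user could be served to the next user handled by the same thread. A minimal sketch of that failure mode (the BrokenFs class is hypothetical, for illustration only; the real property is in webhdfs.py below):

import threading

class BrokenFs(object):
    """Stand-in for the pre-commit caching (illustrative only)."""
    def __init__(self):
        self._thread_local = threading.local()
        self.user = None

    @property
    def trash_path(self):
        try:
            return self._thread_local.trash_path   # one slot per thread, not per user
        except AttributeError:
            self._thread_local.trash_path = '/user/%s/.Trash' % self.user
        return self._thread_local.trash_path

fs = BrokenFs()
fs.user = 'alice'
print fs.trash_path   # /user/alice/.Trash
fs.user = 'bob'       # same thread now serves another user...
print fs.trash_path   # still /user/alice/.Trash -- bob's deletes land in alice's trash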

+ 1 - 3
apps/filebrowser/src/filebrowser/views.py

@@ -1029,10 +1029,8 @@ def rmtree(request):
     recurring = []
     params = ["path"]
     def bulk_rmtree(*args, **kwargs):
-        original = request.fs.setskiptrash('skip_trash' in request.GET)
         for arg in args:
-            request.fs.do_as_user(request.user, request.fs.rmtree, arg['path'])
-        request.fs.setskiptrash(original)
+            request.fs.do_as_user(request.user, request.fs.rmtree, arg['path'], 'skip_trash' in request.GET)
     return generic_op(RmTreeFormSet, request, bulk_rmtree, ["path"], None,
                       data_extractor=formset_data_extractor(recurring, params),
                       arg_extractor=formset_arg_extractor,
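The same pattern recurs in upload.py further down: rather than flipping the shared setskiptrash flag around the call and restoring it afterward (a read-modify-restore sequence that interleaved green threads could corrupt), skip_trash now travels as an ordinary argument. A hypothetical call site for illustration (request.fs / request.user as in the view above, path invented):

# No shared state to set and restore; the flag rides along with the call.
request.fs.do_as_user(request.user, request.fs.rmtree,
                      '/user/demo/scratch',   # hypothetical path
                      True)                   # skip_trash: bypass .Trash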

+ 1 - 1
desktop/core/src/desktop/lib/eventlet_util.py

@@ -24,4 +24,4 @@ if os.getenv('HUE_SPAWNING', 'no') == 'yes':
   #
   # A cleaner solution would be to avoid this magic and find a way to have
   # the eventlet.green imports done at the very beginning (i.e. before 'socket').
-  eventlet.monkey_patch()
+  eventlet.monkey_patch(thread=True)
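Note that eventlet.monkey_patch() with no keywords patches every supported module, while monkey_patch(thread=True) restricts patching to the thread-related modules (thread, threading, Queue), leaving socket and friends to the explicit eventlet.green imports mentioned in the comment. After patching, threading.local becomes green-thread-local. A small sketch, assuming eventlet is installed:

import eventlet
eventlet.monkey_patch(thread=True)   # patch thread/threading/Queue only

import threading
store = threading.local()

def task(name):
    store.user = name
    eventlet.sleep(0)             # yield so the two green threads interleave
    assert store.user == name     # per-green-thread isolation holds

pool = eventlet.GreenPool()
pool.spawn(task, 'alpha')
pool.spawn(task, 'beta')
pool.waitall()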

+ 46 - 6
desktop/libs/hadoop/src/hadoop/fs/test_webhdfs.py

@@ -24,7 +24,7 @@ import logging
 import posixfile
 import random
 import sys
-from threading import Thread
+import threading
 import unittest
 
 from hadoop import conf, pseudo_hdfs4
@@ -37,11 +37,10 @@ class WebhdfsTests(unittest.TestCase):
   requires_hadoop = True
 
   @classmethod
-  def setup_class(cls):
+  def setUpClass(cls):
     cls.cluster = pseudo_hdfs4.shared_cluster()
 
   def setUp(self):
-    WebhdfsTests.setup_class()
     self.cluster.fs.setuser(self.cluster.superuser)
 
   def tearDown(self):
@@ -265,7 +264,7 @@ class WebhdfsTests(unittest.TestCase):
     # make sure that isn't reflected.
     fs = self.cluster.fs
     fs.setuser("alpha")
-    class T(Thread):
+    class T(threading.Thread):
       def run(self):
         fs.setuser("beta")
         assert_equals("beta", fs.user)
@@ -446,9 +445,50 @@ class WebhdfsTests(unittest.TestCase):
       trash_path = reduce(lambda a, b: a[0] and a or b, zip(exists, trash_paths))[1]
 
       # Restore
-      assert_raises(WebHdfsException, self.cluster.fs.do_as_user, 'nouser', self.cluster.fs.restore, trash_path)
+      assert_raises(IOError, self.cluster.fs.do_as_user, 'nouser', self.cluster.fs.restore, trash_path)
     finally:
       try:
         self.cluster.fs.rmtree(PATH)
       except Exception, ex:
-        LOG.error('Failed to cleanup %s: %s' % (PATH, ex))
+        LOG.error('Failed to cleanup %s: %s' % (PATH, ex))
+
+  def test_trash_users(self):
+    """
+    Imitate eventlet green thread re-use and ensure trash works.
+    """
+    class test_local(object):
+      def __getattribute__(self, name):
+        return object.__getattribute__(self, name)
+      def __setattr__(self, name, value):
+        return object.__setattr__(self, name, value)
+      def __delattr__(self, name):
+        return object.__delattr__(self, name)
+
+    threading.local = test_local
+
+    USERS = ['test1', 'test2']
+    CLEANUP = []
+
+    try:
+      for user in USERS:
+        # Create home directory.
+        self.cluster.fs.setuser(user)
+        self.cluster.fs.create_home_dir()
+        CLEANUP.append(self.cluster.fs.get_home_dir())
+
+        # Move to trash for both users.
+        # If there is a thread local issue, then this will fail.
+        PATH = self.cluster.fs.join(self.cluster.fs.get_home_dir(), 'trash_test')
+        self.cluster.fs.open(PATH, 'w').close()
+        assert_true(self.cluster.fs.exists(PATH))
+        self.cluster.fs.remove(PATH)
+        assert_false(self.cluster.fs.exists(PATH))
+        assert_true(self.cluster.fs.exists(self.cluster.fs.trash_path))
+    finally:
+      reload(threading)
+      self.cluster.fs.setuser(self.cluster.superuser)
+      for directory in CLEANUP:
+        try:
+          self.cluster.fs.rmtree(directory)
+        except Exception, ex:
+          LOG.error('Failed to cleanup %s: %s' % (directory, ex))
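The test_local shim above is the heart of the test: unlike threading.local, which gives every thread its own attribute namespace, the shim behaves like a plain object, so all threads share one namespace. Swapping it in for threading.local simulates an eventlet green thread being re-used for a second user, the exact condition under which the old single-slot cache returned the first user's trash path. A condensed illustration (names hypothetical):

import threading

class shared_local(object):      # same effect as test_local above
    pass

threading.local = shared_local   # monkey-patch, as the test does

loc = threading.local()
loc.trash = '/user/test1/.Trash'
# A later request on the "same thread" sees the stale value:
print loc.trash                  # /user/test1/.Trash, whoever the current user is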

+ 1 - 3
desktop/libs/hadoop/src/hadoop/fs/upload.py

@@ -89,9 +89,7 @@ class HDFStemporaryUploadedFile(object):
 
   def remove(self):
     try:
-      original = self._fs.setskiptrash(True)
-      self._fs.remove(self._path)
-      self._fs.setskiptrash(original)
+      self._fs.remove(self._path, True)
       self._do_cleanup = False
     except IOError, ex:
       if ex.errno != errno.ENOENT:

+ 14 - 27
desktop/libs/hadoop/src/hadoop/fs/webhdfs.py

@@ -126,23 +126,18 @@ class WebHdfs(Hdfs):
   @property
   def trash_path(self):
     try:
-      return self._thread_local.trash_path
+      return self._thread_local.trash_paths[self.user]
     except AttributeError:
-      self._thread_local.trash_path = self.join(self.get_home_dir(), '.Trash')
-    return self._thread_local.trash_path
+      self._thread_local.trash_paths = {}
+      self._thread_local.trash_paths[self.user] = self.join(self.get_home_dir(), '.Trash')
+    except KeyError:
+      self._thread_local.trash_paths[self.user] = self.join(self.get_home_dir(), '.Trash')
+    return self._thread_local.trash_paths[self.user]
 
   @property
   def current_trash_path(self):
     return self.join(self.trash_path, self.TRASH_CURRENT)
 
-  @property
-  def skip_trash(self):
-    try:
-      return self._thread_local.skip_trash
-    except AttributeError:
-      self._thread_local.skip_trash = False
-    return self._thread_local.skip_trash
-
   def _getparams(self):
     return {
       "user.name" : WebHdfs.DEFAULT_USER,
@@ -155,11 +150,6 @@ class WebHdfs(Hdfs):
     self._thread_local.user = user
     return curr
 
-  def setskiptrash(self, skip_trash):
-    curr = self.skip_trash
-    self._thread_local.skip_trash = skip_trash
-    return curr
-
   def listdir_stats(self, path, glob=None):
     """
     listdir_stats(path, glob=None) -> [ WebHdfsStat ]
@@ -234,7 +224,7 @@ class WebHdfs(Hdfs):
 
   def _ensure_current_trash_directory(self):
     """Create trash directory for a user if it doesn't exist."""
-    if not self.exists(self.current_trash_path):
+    if self.exists(self.current_trash_path):
       self.mkdir(self.current_trash_path)
     return self.current_trash_path
 
@@ -283,20 +273,20 @@ class WebHdfs(Hdfs):
     if not result['boolean']:
       raise IOError(_('Delete failed: %s') % path)
 
-  def remove(self, path):
+  def remove(self, path, skip_trash=False):
     """Delete a file."""
     """Delete a file."""
-    if hadoop.core_site.get_trash_interval() is None or self.skip_trash:
+    if hadoop.core_site.get_trash_interval() is None or skip_trash:
       self._delete(path, recursive=False)
     else:
       self._trash(path, recursive=False)
 
-  def rmdir(self, path):
+  def rmdir(self, path, skip_trash=False):
     """Delete a directory."""
     """Delete a directory."""
-    self.remove(path)
+    self.remove(path, skip_trash)
 
-  def rmtree(self, path):
+  def rmtree(self, path, skip_trash=False):
     """Delete a tree recursively."""
     """Delete a tree recursively."""
-    if hadoop.core_site.get_trash_interval() is None or self.skip_trash:
+    if hadoop.core_site.get_trash_interval() is None or skip_trash:
       self._delete(path, recursive=True)
     else:
       self._trash(path, recursive=True)
@@ -339,10 +329,8 @@ class WebHdfs(Hdfs):
     if hadoop.core_site.get_trash_interval() is None:
       raise IOError(errno.EPERM, _("Trash is not enabled."))
 
-    original = self.setskiptrash(True)
     for timestamped_directory in self.listdir(self.trash_path):
-      self.rmtree(self.join(self.trash_path, timestamped_directory))
-    self.setskiptrash(original)
+      self.rmtree(self.join(self.trash_path, timestamped_directory), True)
 
   def mkdir(self, path, mode=None):
     """
@@ -661,7 +649,6 @@ class WebHdfs(Hdfs):
 
   def do_as_user(self, username, fn, *args, **kwargs):
     prev_user = self.user
-
     try:
       self.setuser(username)
       return fn(*args, **kwargs)
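With the setskiptrash property gone, trash behaviour is decided entirely at the call site, and the per-user dictionary keeps cached trash paths from leaking between users on a re-used thread. Hedged usage examples (fs a WebHdfs instance, paths hypothetical):

fs.remove('/user/demo/report.csv')                # default: moved to .Trash
fs.remove('/user/demo/report.csv', True)          # skip_trash: deleted outright
fs.rmtree('/user/demo/scratch', skip_trash=True)  # recursive, bypasses trash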