
HUE-928 [filebrowser] Copy file or directory

Uses a new copy mechanism in the hadoop library.
Added test cases validating the copy mechanism.
Added a front end for copying files in the filebrowser.
abec 13 years ago
parent
commit 635e0ce274

+ 10 - 0
apps/filebrowser/src/filebrowser/forms.py

@@ -79,6 +79,16 @@ class BaseRenameFormSet(FormSet):
 
 RenameFormSet = formset_factory(RenameForm, formset=BaseRenameFormSet, extra=0)
 
+class CopyForm(forms.Form):
+  op = "copy"
+  src_path = CharField(label=_("File to copy"), help_text=_("The file to copy."))
+  dest_path = CharField(label=_("Destination location"), help_text=_("Copy the file to:"))
+
+class BaseCopyFormSet(FormSet):
+  op = "copy"
+
+CopyFormSet = formset_factory(CopyForm, formset=BaseCopyFormSet, extra=0)
+
 class UploadFileForm(forms.Form):
   op = "upload"
   # The "hdfs" prefix in "hdfs_file" triggers the HDFSfileUploadHandler
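
For context, a minimal sketch (not part of this commit) of how the new CopyForm validates its two fields inside a configured Hue/Django environment; the paths are illustrative only.

# Hypothetical usage of the CopyForm defined above; paths are made up.
from filebrowser.forms import CopyForm

form = CopyForm(data={'src_path': '/user/demo/report.txt', 'dest_path': '/user/demo/backup'})
assert form.is_valid(), form.errors
print(form.cleaned_data['dest_path'])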

+ 1 - 0
apps/filebrowser/src/filebrowser/templates/listdir.mako

@@ -36,6 +36,7 @@ ${ commonheader(_('File Browser'), 'filebrowser', user) | n,unicode }
         <%def name="actions()">
             <button class="btn fileToolbarBtn" title="${_('Rename')}" data-bind="click: renameFile, enable: selectedFiles().length == 1"><i class="icon-font"></i> ${_('Rename')}</button>
             <button class="btn fileToolbarBtn" title="${_('Move')}" data-bind="click: move, enable: selectedFiles().length > 0"><i class="icon-random"></i> ${_('Move')}</button>
+            <button class="btn fileToolbarBtn" title="${_('Copy')}" data-bind="click: copy, enable: selectedFiles().length > 0"><i class="icon-retweet"></i> ${_('Copy')}</button>
             %if is_fs_superuser:
                 <button class="btn fileToolbarBtn" title="${_('Change Owner / Group')}" data-bind="click: changeOwner, enable: selectedFiles().length > 0"><i class="icon-user"></i> ${_('Change Owner / Group')}</button>
             %endif

+ 53 - 0
apps/filebrowser/src/filebrowser/templates/listdir_components.mako

@@ -301,6 +301,32 @@ from django.utils.translation import ugettext as _
         </form>
     </div>
 
+    <!-- copy modal -->
+    <div id="copyModal" class="modal hide fade">
+        <form id="copyForm" action="/filebrowser/copy" method="POST" enctype="multipart/form-data" class="form-inline form-padding-fix">
+            <div class="modal-header">
+                <a href="#" class="close" data-dismiss="modal">&times;</a>
+                <h3>${_('Copy:')}</h3>
+            </div>
+            <div class="modal-body">
+                <div style="padding-left: 15px;">
+                    <label for="copyDestination">${_('Destination')}</label>
+                    <input type="text" class="input-xlarge pathChooser" value="" name="dest_path" id="copyDestination" /><a class="btn fileChooserBtn" href="#" data-filechooser-destination="dest_path">..</a>
+                </div>
+                <br/>
+                <div class="fileChooserModal" class="hide">
+                </div>
+            </div>
+            <div class="modal-footer">
+                <div id="copyNameRequiredAlert" class="hide" style="position: absolute; left: 10;">
+                    <span class="label label-important">${_('Name is required.')}</span>
+                </div>
+                <a class="btn" onclick="$('#copyModal').modal('hide');">${_('Cancel')}</a>
+                <input class="btn btn-primary" type="submit" value="${_('Submit')}"/>
+            </div>
+        </form>
+    </div>
+
     <!-- upload file modal -->
     <div id="uploadFileModal" class="modal hide fade">
         <div class="modal-header">
@@ -505,6 +531,20 @@ from django.utils.translation import ugettext as _
         $("#moveForm").find("input[name='dest_path']").removeClass("fieldError");
       });
 
+      $("#copyForm").on("submit", function () {
+        if ($.trim($("#copyForm").find("input.pathChooser").val()) == "") {
+          $("#copyNameRequiredAlert").show();
+          $("#copyForm").find("input[name='*dest_path']").addClass("fieldError");
+          return false;
+        }
+        return true;
+      });
+
+      $("#copyForm").find("input[name='dest_path']").on("focus", function () {
+        $("#copyNameRequiredAlert").hide();
+        $("#copyForm").find("input[name='dest_path']").recopyClass("fieldError");
+      });
+
       $(".create-directory-link").click(function () {
         $("#createDirectoryModal").modal({
           keyboard:true,
@@ -907,6 +947,19 @@ from django.utils.translation import ugettext as _
         });
       };
 
+      self.copy = function () {
+        var paths = [];
+        $(self.selectedFiles()).each(function (index, file) {
+          paths.push(file.path);
+        });
+        hiddenFields($("#copyForm"), "src_path", paths);
+        $("#copyForm").attr("action", "/filebrowser/copy?next=${url('filebrowser.views.view', path=urlencode('/'))}" + "." + self.currentPath());
+        $("#copyModal").modal({
+          keyboard:true,
+          show:true
+        });
+      };
+
       self.changeOwner = function () {
         var paths = [];
         $(self.selectedFiles()).each(function (index, file) {

+ 1 - 0
apps/filebrowser/src/filebrowser/urls.py

@@ -42,6 +42,7 @@ urlpatterns = patterns('filebrowser.views',
   url(r'^mkdir$', 'mkdir', name='mkdir'),
   url(r'^touch$', 'touch', name='touch'),
   url(r'^move$', 'move', name='move'),
+  url(r'^copy$', 'copy', name='copy'),
   url(r'^rmtree$', 'rmtree', name='rmtree'),
   url(r'^chmod$', 'chmod', name='chmod'),
   url(r'^chown$', 'chown', name='chown'),

+ 14 - 1
apps/filebrowser/src/filebrowser/views.py

@@ -57,7 +57,7 @@ from filebrowser.lib.archives import archive_factory
 from filebrowser.lib.rwx import filetype, rwx
 from filebrowser.lib import xxd
 from filebrowser.forms import RenameForm, UploadFileForm, UploadArchiveForm, MkDirForm, EditorForm, TouchForm,\
-    RenameFormSet, RmTreeFormSet, ChmodFormSet,ChownFormSet
+                              RenameFormSet, RmTreeFormSet, ChmodFormSet, ChownFormSet, CopyFormSet
 from hadoop.fs.hadoopfs import Hdfs
 from hadoop.fs.exceptions import WebHdfsException
 
@@ -1018,6 +1018,19 @@ def move(request):
                       initial_value_extractor=formset_initial_value_extractor)
 
 
+@require_http_methods(["POST"])
+def copy(request):
+    recurring = ['dest_path']
+    params = ['src_path']
+    def bulk_copy(*args, **kwargs):
+        for arg in args:
+            request.fs.copy(arg['src_path'], arg['dest_path'], recursive=True, owner=request.user)
+    return generic_op(CopyFormSet, request, bulk_copy, ["src_path", "dest_path"], None,
+                      data_extractor=formset_data_extractor(recurring, params),
+                      arg_extractor=formset_arg_extractor,
+                      initial_value_extractor=formset_initial_value_extractor)
+
+
 @require_http_methods(["POST"])
 def chmod(request):
     recurring = ["sticky", "user_read", "user_write", "user_execute", "group_read", "group_write", "group_execute", "other_read", "other_write", "other_execute"]
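
As a rough illustration (not part of the commit): a flat POST with repeated src_path values and a single dest_path is all the new endpoint needs; generic_op and formset_data_extractor turn it into one CopyForm per source path, and bulk_copy is run over the results (see test_copy below). The sketch uses Django's test client with made-up paths and skips authentication, which the real tests handle via make_logged_in_client.

# Hedged sketch only: paths are hypothetical and a real request must come from a logged-in user.
from django.test import Client

c = Client()
c.post('/filebrowser/copy',
       dict(src_path=['/tmp/demo/a', '/tmp/demo/b'], dest_path='/tmp/demo/dest'))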

+ 54 - 0
apps/filebrowser/src/filebrowser/views_test.py

@@ -139,6 +139,60 @@ def test_move():
       pass      # Don't let cleanup errors mask earlier failures
 
 
+@attr('requires_hadoop')
+def test_copy():
+  cluster = pseudo_hdfs4.shared_cluster()
+
+  try:
+    c = make_logged_in_client(cluster.superuser)
+    cluster.fs.setuser(cluster.superuser)
+
+    prefix = '/test-copy'
+    PATH_1 = '%s/1' % prefix
+    PATH_2 = '%s/2' % prefix
+    SUB_PATH1_1 = '%s/1' % PATH_1
+    SUB_PATH1_2 = '%s/2' % PATH_1
+    SUB_PATH1_3 = '%s/3' % PATH_1
+    SUB_PATH2_1 = '%s/1' % PATH_2
+    SUB_PATH2_2 = '%s/2' % PATH_2
+    SUB_PATH2_3 = '%s/3' % PATH_2
+    cluster.fs.mkdir(prefix)
+    cluster.fs.mkdir(PATH_1)
+    cluster.fs.mkdir(PATH_2)
+    cluster.fs.mkdir(SUB_PATH1_1)
+    cluster.fs.mkdir(SUB_PATH1_2)
+    cluster.fs.mkdir(SUB_PATH1_3)
+
+    assert_true(cluster.fs.exists(SUB_PATH1_1))
+    assert_true(cluster.fs.exists(SUB_PATH1_2))
+    assert_true(cluster.fs.exists(SUB_PATH1_3))
+    assert_false(cluster.fs.exists(SUB_PATH2_1))
+    assert_false(cluster.fs.exists(SUB_PATH2_2))
+    assert_false(cluster.fs.exists(SUB_PATH2_3))
+
+    c.post('/filebrowser/copy', dict(src_path=[SUB_PATH1_1], dest_path=PATH_2))
+    assert_true(cluster.fs.exists(SUB_PATH1_1))
+    assert_true(cluster.fs.exists(SUB_PATH1_2))
+    assert_true(cluster.fs.exists(SUB_PATH1_3))
+    assert_true(cluster.fs.exists(SUB_PATH2_1))
+    assert_false(cluster.fs.exists(SUB_PATH2_2))
+    assert_false(cluster.fs.exists(SUB_PATH2_3))
+
+    c.post('/filebrowser/copy', dict(src_path=[SUB_PATH1_2, SUB_PATH1_3], dest_path=PATH_2))
+    assert_true(cluster.fs.exists(SUB_PATH1_1))
+    assert_true(cluster.fs.exists(SUB_PATH1_2))
+    assert_true(cluster.fs.exists(SUB_PATH1_3))
+    assert_true(cluster.fs.exists(SUB_PATH2_1))
+    assert_true(cluster.fs.exists(SUB_PATH2_2))
+    assert_true(cluster.fs.exists(SUB_PATH2_3))
+
+  finally:
+    try:
+      cluster.fs.rmtree(prefix)     # Clean up
+    except:
+      pass      # Don't let cleanup errors mask earlier failures
+
+
 @attr('requires_hadoop')
 def test_mkdir_singledir():
   cluster = pseudo_hdfs4.shared_cluster()

+ 51 - 12
desktop/libs/hadoop/src/hadoop/fs/fs_test.py

@@ -23,7 +23,7 @@ import unittest
 
 from hadoop import fs, pseudo_hdfs4
 from nose.plugins.attrib import attr
-from nose.tools import assert_equal
+from nose.tools import assert_equal, assert_true
 
 logger = logging.getLogger(__name__)
 
@@ -100,20 +100,59 @@ def test_hdfs_copy():
   minicluster = pseudo_hdfs4.shared_cluster()
   minifs = minicluster.fs
 
-  olduser = minifs.setuser(minifs.superuser)
-  minifs.chmod('/', 0777)
-  minifs.setuser(olduser)
+  try:
+    olduser = minifs.setuser(minifs.superuser)
+    minifs.chmod('/', 0777)
+    minifs.setuser(olduser)
 
-  data = "I will not make flatuent noises in class\n" * 2000
-  minifs.create('/copy_test_src', permission=0646, data=data)
-  minifs.create('/copy_test_dst', data="some initial data")
+    data = "I will not make flatuent noises in class\n" * 2000
+    minifs.create('/copy_test_src', permission=0646, data=data)
+    minifs.create('/copy_test_dst', data="some initial data")
 
-  minifs.copyfile('/copy_test_src', '/copy_test_dst')
-  actual = minifs.read('/copy_test_dst', 0, len(data) + 100)
-  assert_equal(data, actual)
+    minifs.copyfile('/copy_test_src', '/copy_test_dst')
+    actual = minifs.read('/copy_test_dst', 0, len(data) + 100)
+    assert_equal(data, actual)
+
+    sb = minifs.stats('/copy_test_dst')
+    assert_equal(0646, stat.S_IMODE(sb.mode))
+
+  finally:
+    minifs.rmtree('/copy_test_src')
+    minifs.rmtree('/copy_test_dst')
+
+@attr('requires_hadoop')
+def test_hdfs_full_copy():
+  minicluster = pseudo_hdfs4.shared_cluster()
+  minifs = minicluster.fs
 
-  sb = minifs.stats('/copy_test_dst')
-  assert_equal(0646, stat.S_IMODE(sb.mode))
+  try:
+    minifs.do_as_superuser(minifs.chmod, '/', 0777)
+    minifs.mkdir('/copy_test')
+    minifs.mkdir('/copy_test/src')
+    minifs.mkdir('/copy_test/dest')
+
+    # File to directory copy.
+    # No guarantees on file permissions at the moment.
+    data = "I will not make flatuent noises in class\n" * 2000
+    minifs.create('/copy_test/src/file.txt', permission=0646, data=data)
+    minifs.copy('/copy_test/src/file.txt', '/copy_test/dest')
+    assert_true(minifs.exists('/copy_test/dest/file.txt'))
+
+    # Directory to directory copy.
+    # No guarantees on directory permissions at the moment.
+    minifs.copy('/copy_test/src', '/copy_test/dest', True)
+    assert_true(minifs.exists('/copy_test/dest/src'))
+
+    # Copying a directory onto a file should fail with IOError.
+    try:
+      minifs.copy('/copy_test/src', '/copy_test/dest/file.txt', True)
+      # If no exception was raised, fail the test explicitly.
+      raise AssertionError("Expected IOError when copying a directory onto a file.")
+    except IOError, e:
+      pass
+
+  finally:
+    minifs.rmtree('/copy_test')
 
 @attr('requires_hadoop')
 def test_hdfs_copy_from_local():

+ 18 - 0
desktop/libs/hadoop/src/hadoop/fs/hadoopfs.py

@@ -220,6 +220,20 @@ class Hdfs(object):
     path = url[i:]
     return (schema, netloc, normpath(path), '', '')
 
+  def listdir_recursive(self, path, glob=None):
+    """
+    listdir_recursive(path, glob=None) -> [ entry names ]
+
+    Get directory entry names without stats, recursively.
+    """
+    paths = [path]
+    while paths:
+      path = paths.pop()
+      if self.isdir(path):
+        hdfs_paths = self.listdir_stats(path, glob)
+        paths[:0] = [x.path for x in hdfs_paths]
+      yield path
+
   def create_home_dir(self, home_path=None):
     if home_path is None:
       home_path = self.get_home_dir()
@@ -287,6 +301,7 @@ class Hdfs(object):
     else:
       LOG.info(_('Skipping %s (not a file).') % local_src)
 
+
   @_coerce_exceptions
   def mktemp(self, subdir='', prefix='tmp', basedir=None):
     """
@@ -337,6 +352,9 @@ class Hdfs(object):
   def isdir(self):
     raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'isdir'})
 
+  def listdir_stats(self):
+    raise NotImplementedError(_("%(function)s has not been implemented.") % {'function': 'listdir_stats'})
+
 
 """
 Deprecated! Use WebHdfs instead
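
To make the traversal order of the new listdir_recursive() concrete, here is a small standalone sketch (not part of the commit) that re-implements the same loop against a fake in-memory tree; FakeFs, FakeStat and the paths are invented for illustration.

# Standalone illustration of the listdir_recursive() traversal above.
class FakeStat(object):
  def __init__(self, path):
    self.path = path

class FakeFs(object):
  TREE = {'/a': ['/a/x', '/a/y'], '/a/y': ['/a/y/z']}

  def isdir(self, path):
    return path in self.TREE

  def listdir_stats(self, path, glob=None):
    return [FakeStat(p) for p in self.TREE.get(path, [])]

  def listdir_recursive(self, path, glob=None):
    # Same loop as Hdfs.listdir_recursive: pop from the tail, prepend children.
    paths = [path]
    while paths:
      path = paths.pop()
      if self.isdir(path):
        paths[:0] = [x.path for x in self.listdir_stats(path, glob)]
      yield path

print(list(FakeFs().listdir_recursive('/a')))
# -> ['/a', '/a/y', '/a/x', '/a/y/z']: each directory is yielded before its own
# children; the recursive chmod/chown in webhdfs.py iterate over this stream.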

+ 59 - 16
desktop/libs/hadoop/src/hadoop/fs/webhdfs.py

@@ -27,6 +27,7 @@ import stat
 import threading
 
 from django.utils.encoding import smart_str
+from django.utils.translation import ugettext as _
 from desktop.lib.rest import http_client, resource
 from hadoop.fs import normpath, SEEK_SET, SEEK_CUR, SEEK_END
 from hadoop.fs.hadoopfs import Hdfs
@@ -275,20 +276,6 @@ class WebHdfs(Hdfs):
     for dirent in ls:
       self.rename(Hdfs.join(old_dir, dirent), Hdfs.join(new_dir, dirent))
 
-  def _listdir_r(self, path, glob=None):
-    """
-    _listdir_r(path, glob=None) -> [ entry names ]
-
-    Get directory entry names without stats, recursively.
-    """
-    paths = [path]
-    while paths:
-      path = paths.pop()
-      if self.isdir(path):
-        hdfs_paths = self.listdir_stats(path, glob)
-        paths[:0] = [x.path for x in hdfs_paths]
-      yield path
-
   def chown(self, path, user=None, group=None, recursive=False):
     """chown(path, user=None, group=None, recursive=False)"""
     path = Hdfs.normpath(path)
@@ -299,7 +286,7 @@ class WebHdfs(Hdfs):
     if group is not None:
       params['group'] = group
     if recursive:
-      for xpath in self._listdir_r(path):
+      for xpath in self.listdir_recursive(path):
         self._root.put(xpath, params)
     else:
       self._root.put(path, params)
@@ -316,7 +303,7 @@ class WebHdfs(Hdfs):
     params['op'] = 'SETPERMISSION'
     params['permission'] = safe_octal(mode)
     if recursive:
-      for xpath in self._listdir_r(path):
+      for xpath in self.listdir_recursive(path):
         self._root.put(xpath, params)
     else:
       self._root.put(path, params)
@@ -441,6 +428,62 @@ class WebHdfs(Hdfs):
         self.do_as_superuser(self.chown, destination_file, owner, owner)
 
 
+  def copy(self, src, dest, recursive=False, dir_mode=0755, owner=None):
+    """
+    Copy file, or directory, in HDFS to another location in HDFS.
+
+    ``src`` -- The directory, or file, to copy from.
+    ``dest`` -- The directory, or file, to copy to.
+            If 'dest' is an existing directory, copy 'src' into it.
+            If 'dest' is an existing file and 'src' is a file, overwrite 'dest'.
+            If 'dest' does not exist, copy 'src' to 'dest'.
+    ``recursive`` -- Recursively copy the contents of 'src' to 'dest'.
+                 This is required when 'src' is a directory.
+    ``dir_mode`` and ``owner`` are used to define permissions on the newly
+    copied files and directories.
+
+    This method will overwrite any pre-existing files that collide with what is being copied.
+    Copying a directory to a file is not allowed.
+    """
+    if owner is None:
+      owner = self.user
+
+    src = self.abspath(src)
+    dest = self.abspath(dest)
+
+    if not self.exists(src):
+      raise IOError(errno.ENOENT, _("File not found: %s") % src)
+
+    if self.isdir(src):
+      # 'src' is a directory.
+      # Skip it unless this is a recursive copy.
+      if not recursive:
+        LOG.debug("Skipping contents of %s" % src)
+        return None
+
+      # If 'dest' is a directory change 'dest'
+      # to include 'src' basename.
+      # create 'dest' if it doesn't already exist.
+      if self.exists(dest):
+        if self.isdir(dest):
+          dest = self.join(dest, self.basename(src))
+        else:
+          raise IOError(errno.EEXIST, _("Destination file %s exists and is not a directory.") % dest)
+      self.do_as_user(owner, self.mkdir, dest)
+      self.do_as_user(owner, self.chmod, dest, mode=dir_mode)
+
+      # Copy files in 'src' directory to 'dest'.
+      self.copy_remote_dir(src, dest, dir_mode, owner)
+    else:
+      # 'src' is a file.
+      # If 'dest' is a directory, then copy 'src' into that directory.
+      # Otherwise, copy to 'dest'.
+      if self.exists(dest) and self.isdir(dest):
+        self.copyfile(src, self.join(dest, self.basename(src)))
+      else:
+        self.copyfile(src, dest)
+
+
   @staticmethod
   def urlsplit(url):
     return Hdfs.urlsplit(url)
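
To summarize the copy() semantics documented above, a hedged usage sketch; the fs argument stands for an already-instantiated WebHdfs client and all paths are invented.

# Illustrative only: `fs` is assumed to be a WebHdfs instance; paths are made up.
def demo_copy(fs):
  fs.copy('/data/report.txt', '/backups')                   # dest is an existing dir: lands as /backups/report.txt
  fs.copy('/data/report.txt', '/backups/report-copy.txt')   # dest does not exist: copied to exactly that path
  fs.copy('/data/logs', '/backups', recursive=True)         # directories need recursive=True: creates /backups/logs

  try:
    fs.copy('/data/logs', '/backups/report.txt', recursive=True)
  except IOError:
    pass  # copying a directory onto an existing file raises IOError(EEXIST)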