HUE-4167 [aws] Implement S3 rmtree operation

Jenny Kim · 9 years ago · commit 1a01014

+ 2 - 0
apps/filebrowser/src/filebrowser/templates/fb_components.mako

@@ -30,11 +30,13 @@ from django.utils.translation import ugettext as _
             </ul>
             <input id="hueBreadcrumbText" type="text" style="display:none" data-bind="value: currentPath" autocomplete="off" />
         </li>
+        % if is_trash_enabled:
         <li class="pull-right">
           <a href="${url('filebrowser.views.view', path=urlencode(path))}?default_to_trash" class="trashLink" title="${_('View trash')}">
             <i class="fa fa-trash-o"></i> ${_('Trash')}
           </a>
         </li>
+        % endif
         <li class="pull-right">
           <div class="dropdown history">
             <a href="javascript:void(0)" class="historyLink dropdown-toggle" title="${_('View History')}" data-toggle="dropdown" id="historyDropdown">

+ 3 - 2
apps/filebrowser/src/filebrowser/templates/listdir_components.mako

@@ -472,8 +472,9 @@ from django.utils.translation import ugettext as _
       </a>
     </li>
     <li class="divider"></li>
-    <li><a href="#" data-bind="enable: $root.selectedFiles().length > 0 && isCurrentDirSelected().length == 0,
-    click: $root.trashSelected"><i class="fa fa-fw fa-times"></i> ${_('Move to trash')}</a></li>
+    % if is_trash_enabled:
+    <li><a href="#" data-bind="enable: $root.selectedFiles().length > 0 && isCurrentDirSelected().length == 0, click: $root.trashSelected"><i class="fa fa-fw fa-times"></i> ${_('Move to trash')}</a></li>
+    % endif
     <li><a href="#" class="delete-link" title="${_('Delete forever')}" data-bind="enable: $root.selectedFiles().length > 0, click: $root.deleteSelected"><i class="fa fa-fw fa-bolt"></i> ${_('Delete forever')}</a></li>
     <li class="divider"></li>
     <li data-bind="css: {'disabled': selectedFiles().length > 1 }">

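Both template hunks consume the same `is_trash_enabled` context flag, which the view derives from the URI scheme of the current path (see the views.py hunk below): trash only exists on HDFS, so S3 paths must not render the Trash link or the "Move to trash" action. A minimal standalone sketch of that test, with a hypothetical `get_scheme` helper standing in for Hue's `request.fs._get_scheme`:

    # Sketch only: gate the Trash UI on the filesystem scheme of the path.
    # get_scheme is a hypothetical stand-in, not Hue's actual implementation.
    def get_scheme(path):
        return path.split('://', 1)[0] if '://' in path else 'hdfs'

    def is_trash_enabled(path):
        return get_scheme(path) == 'hdfs'

    assert is_trash_enabled('/user/demo')                  # plain HDFS path
    assert not is_trash_enabled('s3a://my-bucket/data/')   # S3 path: no trash
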
+ 7 - 2
apps/filebrowser/src/filebrowser/views.py

@@ -45,7 +45,6 @@ from cStringIO import StringIO
 from gzip import GzipFile
 from avro import datafile, io
 
-from aws.conf import is_enabled as is_s3_enabled
 from desktop import appmanager
 from desktop.lib import i18n, paginator
 from desktop.lib.conf import coerce_bool
@@ -56,7 +55,6 @@ from desktop.lib.fs import splitpath
 from hadoop.fs.hadoopfs import Hdfs
 from hadoop.fs.exceptions import WebHdfsException
 from hadoop.fs.fsutils import do_overwrite_save
-from hadoop.fs.webhdfs import WebHdfs
 
 from filebrowser.conf import MAX_SNAPPY_DECOMPRESSION_SIZE
 from filebrowser.conf import SHOW_DOWNLOAD_BUTTON
@@ -434,11 +432,14 @@ def listdir_paged(request, path):
 
     page.object_list = [ _massage_stats(request, s) for s in shown_stats ]
 
+    is_trash_enabled = request.fs._get_scheme(path) == 'hdfs'
+
     is_fs_superuser = _is_hdfs_superuser(request)
     data = {
         'path': path,
         'breadcrumbs': breadcrumbs,
         'current_request_path': request.path,
+        'is_trash_enabled': is_trash_enabled,
         'files': page.object_list,
         'page': _massage_page(page),
         'pagesize': pagesize,
@@ -981,6 +982,10 @@ def generic_op(form_class, request, op, parameter_names, piggyback=None, templat
                     msg += _(' Note: you are a Hue admin but not a HDFS superuser, "%(superuser)s" or part of HDFS supergroup, "%(supergroup)s".') \
                            % {'superuser': request.fs.superuser, 'supergroup': request.fs.supergroup}
                 raise PopupException(msg, detail=e)
+            except NotImplementedError as e:
+                msg = _("Cannot perform operation.")
+                raise PopupException(msg, detail=e)
+
             if next:
                 logging.debug("Next: %s" % next)
                 # Doesn't need to be quoted: quoting is done by HttpResponseRedirect.

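The new except clause in generic_op turns the backend's NotImplementedError into a user-facing popup instead of an unhandled server error. A minimal sketch of that flow, using a hypothetical fake filesystem in place of `request.fs` and a print in place of Hue's PopupException:

    # Sketch only: the S3 backend refuses trash operations with
    # NotImplementedError; generic_op now catches it and re-raises it
    # as a PopupException for the UI. FakeS3FS is a hypothetical stand-in.
    class FakeS3FS(object):
        def rmtree(self, path, skipTrash=False):
            if not skipTrash:
                raise NotImplementedError('Moving to trash is not implemented for S3')

    fs = FakeS3FS()
    try:
        fs.rmtree('s3a://my-bucket/tmp/')  # trash requested by default
    except NotImplementedError as e:
        print('Cannot perform operation. (%s)' % e)  # Hue shows this as a popup
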
+ 12 - 5
desktop/libs/aws/src/aws/s3/s3fs.py

@@ -28,6 +28,8 @@ from boto.exception import S3ResponseError
 from boto.s3.key import Key
 from boto.s3.prefix import Prefix
 
+from django.utils.translation import ugettext as _
+
 from aws import s3
 from aws.s3 import normpath, s3file, translate_s3_error, S3_ROOT
 from aws.s3.s3stat import S3Stat
@@ -181,7 +183,7 @@ class S3FileSystem(object):
   @translate_s3_error
   def listdir_stats(self, path, glob=None):
     if glob is not None:
-      raise NotImplementedError("Option `glob` is not implemented")
+      raise NotImplementedError(_("Option `glob` is not implemented"))
 
     if s3.is_root(path):
       self._init_bucket_cache()
@@ -206,7 +208,12 @@ class S3FileSystem(object):
   @translate_s3_error
   def rmtree(self, path, skipTrash=False):
     if not skipTrash:
-      raise NotImplementedError('Moving to trash is not implemented for S3')
+      raise NotImplementedError(_('Moving to trash is not implemented for S3'))
+
+    bucket_name, key_name = s3.parse_uri(path)[:2]
+    if bucket_name and not key_name:
+      raise NotImplementedError(_('Deleting a bucket is not implemented for S3'))
+
     key = self._get_key(path, validate=False)
 
     if key.exists():
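
Since S3 has no real directories, rmtree with skipTrash amounts to deleting every key under the path's prefix, after the `s3.parse_uri(path)[:2]` guard has refused bucket-level deletes (a bucket name with an empty key name means the path is a bucket root). A minimal boto 2 sketch of that recursive delete, with hypothetical bucket and prefix names:

    # Sketch only: delete a "directory" on S3 by deleting every key under
    # its prefix. Bucket and prefix names are hypothetical.
    import boto

    conn = boto.connect_s3()
    bucket = conn.get_bucket('my-bucket')
    prefix = 'data/tmp/'  # the "directory" to remove
    # bucket.list(prefix=...) iterates every key under the prefix, i.e. the
    # whole subtree; delete_keys issues a single multi-object delete.
    bucket.delete_keys([key.name for key in bucket.list(prefix=prefix)])
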
@@ -230,12 +237,12 @@ class S3FileSystem(object):
   @translate_s3_error
   def remove(self, path, skip_trash=False):
     if not skip_trash:
-      raise NotImplementedError('Moving to trash is not implemented for S3')
+      raise NotImplementedError(_('Moving to trash is not implemented for S3'))
     key = self._get_key(path, validate=False)
     key.bucket.delete_key(key.name)
 
   def restore(self, *args, **kwargs):
-    raise NotImplementedError('Moving to trash is not implemented for S3')
+    raise NotImplementedError(_('Moving to trash is not implemented for S3'))
 
   @translate_s3_error
   def mkdir(self, path, *args, **kwargs):
@@ -297,7 +304,7 @@ class S3FileSystem(object):
 
     for key in src_bucket.list(prefix=src_key):
       if not key.name.startswith(src_key):
-        raise RuntimeError("Invalid key to transform: %s" % key.name)
+        raise RuntimeError(_("Invalid key to transform: %s") % key.name)
       dst_name = posixpath.normpath(s3.join(dst_key, key.name[cut:]))
       key.copy(dst_bucket, dst_name)
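
The copy loop rewrites each source key into the destination namespace by cutting the shared source prefix and rejoining the remainder onto the destination prefix. A standalone sketch of that rewrite, with hypothetical names, `posixpath.join` standing in for `s3.join`, and `cut` assumed to be the length of the source prefix:

    # Sketch only: prefix rewrite used when copying a key subtree.
    import posixpath

    src_key = 'reports/2016/'
    dst_key = 'archive/2016/'
    cut = len(src_key)  # assumed: chars to strip from each source key name

    name = 'reports/2016/q1/data.csv'
    assert name.startswith(src_key)  # mirrors the RuntimeError guard in the diff
    dst_name = posixpath.normpath(posixpath.join(dst_key, name[cut:]))
    print(dst_name)  # archive/2016/q1/data.csv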