
HUE-4398 [fb] API should check if bucket already exists in the region and bubble up the error

Jenny Kim committed 9 years ago
commit 01bcbd4
2 changed files with 13 additions and 2 deletions
  1. apps/filebrowser/src/filebrowser/views.py (+4 -0)
  2. desktop/libs/aws/src/aws/s3/s3fs.py (+9 -2)

apps/filebrowser/src/filebrowser/views.py (+4 -0)

@@ -45,6 +45,7 @@ from cStringIO import StringIO
 from gzip import GzipFile
 from avro import datafile, io
 
+from aws.s3.s3fs import S3FileSystemException
 from desktop import appmanager
 from desktop.lib import i18n, paginator
 from desktop.lib.conf import coerce_bool
@@ -984,6 +985,9 @@ def generic_op(form_class, request, op, parameter_names, piggyback=None, templat
                     msg += _(' Note: you are a Hue admin but not a HDFS superuser, "%(superuser)s" or part of HDFS supergroup, "%(supergroup)s".') \
                            % {'superuser': request.fs.superuser, 'supergroup': request.fs.supergroup}
                 raise PopupException(msg, detail=e)
+            except S3FileSystemException, e:
+                msg = _("S3 filesystem exception.")
+                raise PopupException(msg, detail=e)
             except NotImplementedError, e:
                 msg = _("Cannot perform operation.")
                 raise PopupException(msg, detail=e)
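
For context, a minimal sketch (not part of this commit) of the error flow the new branch enables. The do_operation helper and the fs object are hypothetical stand-ins for the filebrowser call path; the Python 2 "except ..., e" syntax matches the codebase.

# Sketch only: hypothetical call site mirroring the generic_op() pattern above.
from django.utils.translation import ugettext as _

from aws.s3.s3fs import S3FileSystemException
from desktop.lib.exceptions_renderable import PopupException


def do_operation(fs, path):
    try:
        fs.mkdir(path)  # any filesystem call that may hit S3 underneath
    except S3FileSystemException, e:
        # Same pattern as the hunk above: wrap the typed S3 error so the
        # frontend renders it as a popup instead of a generic server error.
        raise PopupException(_("S3 filesystem exception."), detail=e)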

desktop/libs/aws/src/aws/s3/s3fs.py (+9 -2)

@@ -40,6 +40,10 @@ DEFAULT_READ_SIZE = 1024 * 1024  # 1MB
 LOG = logging.getLogger(__name__)
 
 
+class S3FileSystemException(Exception):
+  pass
+
+
 class S3FileSystem(object):
   def __init__(self, s3_connection):
     self._s3_connection = s3_connection
@@ -62,7 +66,10 @@ class S3FileSystem(object):
     try:
       bucket = self._get_bucket(name)
     except S3ResponseError, e:
-      if e.status == 404:
+      if e.status == 403:
+        raise S3FileSystemException(_('User is not authorized to access bucket named "%s". '
+          'If you are attempting to create a bucket, this bucket name is already reserved.') % name)
+      elif e.status == 404:
         bucket = self._s3_connection.create_bucket(name)
         self._bucket_cache[name] = bucket
       else:
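
A hedged usage sketch of the new branching (not part of the commit): a 403 from S3, which boto reports when the bucket name is already owned by another account, now bubbles up as S3FileSystemException, while a 404 still falls through to bucket creation. The method name _get_or_create_bucket and the connection setup are assumptions; the hunk only shows the body of the enclosing method.

# Sketch only: exercising the 403/404 branches through a hypothetical call.
import boto

from aws.s3.s3fs import S3FileSystem, S3FileSystemException

conn = boto.connect_s3()  # assumes AWS credentials are configured for boto
fs = S3FileSystem(conn)

try:
    # Assumed method name for the code in this hunk: it calls self._get_bucket(name),
    # creates the bucket on a 404, and now raises on a 403.
    fs._get_or_create_bucket('a-bucket-name-reserved-by-another-account')
except S3FileSystemException, e:
    print 'Bucket name is reserved or access is denied: %s' % e
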
@@ -234,7 +241,7 @@ class S3FileSystem(object):
         len(result.errors),
         '\n'.join(map(repr, result.errors)))
       LOG.error(msg)
-      raise IOError(msg)
+      raise S3FileSystemException(msg)
 
   @translate_s3_error
   def remove(self, path, skip_trash=True):
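
One consequence of the last hunk, sketched below with a hypothetical caller (not part of the commit): code that previously caught IOError for partial multi-delete failures should now catch S3FileSystemException. The rmtree call is an assumed entry point into the bulk-delete path; only the raise site appears in the diff.

# Sketch only: adapting a hypothetical caller to the new exception type.
import logging

from aws.s3.s3fs import S3FileSystemException

LOG = logging.getLogger(__name__)


def purge_prefix(fs, path):
    try:
        fs.rmtree(path)  # assumed bulk-delete entry point
    except S3FileSystemException, e:  # previously this surfaced as IOError
        LOG.error('Some keys could not be deleted: %s' % e)
        raise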