
HUE-4519 [fb] Check for user permissions first before uploading

Jenny Kim 9 years ago
parent
commit
7c44807

+ 3 - 0
desktop/core/src/desktop/lib/fs/proxyfs.py

@@ -213,3 +213,6 @@ class ProxyFS(object):
 
   def upload(self, file, path, *args, **kwargs):
     self._get_fs(path).upload(file, path, *args, **kwargs)
+
+  def check_access(self, path, *args, **kwargs):
+    self._get_fs(path).check_access(path, *args, **kwargs)
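ProxyFS routes each call to the filesystem registered for the path's scheme, so check_access follows the same delegation pattern as upload: the proxy only decides which backend owns the path and forwards the call. A rough usage sketch, assuming a ProxyFS built from an HDFS client and an S3 client (the constructor arguments and client variables here are illustrative, not part of this change):

  # Hypothetical wiring; `hdfs_client` and `s3_client` stand in for real filesystem objects.
  proxy_fs = ProxyFS({'hdfs': hdfs_client, 's3a': s3_client}, default_scheme='hdfs')

  # Dispatched to the S3 filesystem, which inspects the bucket ACL (see below).
  proxy_fs.check_access('s3a://my-bucket/data/file.csv', permission='WRITE')

  # Dispatched to WebHDFS, which issues an op=CHECKACCESS request.
  proxy_fs.check_access('/user/test/data', aclspec='rw-')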

+ 12 - 0
desktop/libs/aws/src/aws/s3/s3fs.py

@@ -393,5 +393,17 @@ class S3FileSystem(object):
     new_data = data or ''
     key.set_contents_from_string(current_data + new_data, replace=True)
 
+  @translate_s3_error
+  def check_access(self, path, permission='READ'):
+    permission = permission.upper()
+    bucket_name, key_name = s3.parse_uri(path)[:2]
+    bucket = self._get_bucket(bucket_name)
+    acp = bucket.get_acl()
+    for grant in acp.acl.grants:
+      if grant.permission == permission or grant.permission == 'FULL_CONTROL':
+        # TODO: Check grant.uri for user list too
+        return True
+    return False
+
   def setuser(self, user):
     pass  # user-concept doesn't have sense for this implementation
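The S3 check works at the bucket-ACL level: it fetches the bucket's access control policy through boto and returns True if any grant carries the requested permission or FULL_CONTROL, False otherwise (the TODO notes it does not yet match the grantee against the requesting user, so this is a coarse bucket-level check). A minimal sketch of how the upload handler below exercises it, assuming an S3 connection is configured so that aws.get_s3fs() succeeds and using an illustrative bucket name:

  from aws import get_s3fs

  fs = get_s3fs()

  # True only if a bucket ACL grant includes WRITE or FULL_CONTROL.
  can_write = fs.check_access('s3a://my-bucket/uploads/report.csv', permission='WRITE')
  # S3FileUploadHandler raises S3FileUploadError when this is False,
  # before any data is sent to S3.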

+ 6 - 1
desktop/libs/aws/src/aws/s3/upload.py

@@ -26,6 +26,7 @@ import StringIO
 
 from django.core.files.uploadedfile import SimpleUploadedFile
 from django.core.files.uploadhandler import FileUploadHandler, SkipFile, StopFutureHandlers, StopUpload
+from django.utils.translation import ugettext as _
 
 from aws import get_s3fs
 from aws.s3 import parse_uri
@@ -58,6 +59,10 @@ class S3FileUploadHandler(FileUploadHandler):
     self._part_num = 1
 
     if self._is_s3_upload():
+      # Check access permissions before attempting upload
+      if not self._fs.check_access(self.destination, permission='WRITE'):
+        raise S3FileUploadError(_('Insufficient permissions to write to S3 path "%s".') % self.destination)
+
       self.bucket_name, self.key_name = parse_uri(self.destination)[:2]
       # Verify that the path exists
       self._fs._stats(self.destination)
@@ -112,7 +117,7 @@ class S3FileUploadHandler(FileUploadHandler):
       fs = get_s3fs()
 
     if not fs:
-      raise S3FileUploadError("No S3 filesystem found.")
+      raise S3FileUploadError(_("No S3 filesystem found."))
 
     return fs
 

+ 13 - 0
desktop/libs/hadoop/src/hadoop/fs/test_webhdfs.py

@@ -560,3 +560,16 @@ class WebhdfsTests(unittest.TestCase):
           self.cluster.fs.rmtree(dir)
         except Exception, ex:
           LOG.error('Failed to cleanup %s: %s' % (directory, ex))
+
+  def test_check_access(self):
+    # Set user to owner
+    self.cluster.fs.setuser('test')
+    assert_equals('', self.cluster.fs.check_access(path='/user/test', aclspec='rw-'))  # returns zero-length content
+
+    # Set user to superuser
+    self.cluster.fs.setuser(self.cluster.superuser)
+    assert_equals('', self.cluster.fs.check_access(path='/user/test', aclspec='rw-'))  # returns zero-length content
+
+    # Set user to non-authorized, non-superuser user
+    self.cluster.fs.setuser('nonadmin')
+    assert_raises(WebHdfsException, self.cluster.fs.check_access, path='/user/test', aclspec='rw-')

+ 10 - 1
desktop/libs/hadoop/src/hadoop/fs/upload.py

@@ -30,9 +30,11 @@ import logging
 import time
 
 from django.core.files.uploadhandler import FileUploadHandler, StopFutureHandlers, StopUpload
+from django.utils.translation import ugettext as _
 
 import hadoop.cluster
 from hadoop.conf import UPLOAD_CHUNK_SIZE
+from hadoop.fs.exceptions import WebHdfsException
 
 
 LOG = logging.getLogger(__name__)
@@ -61,12 +63,19 @@ class HDFStemporaryUploadedFile(object):
 
     # Don't want to handle this upload if we don't have an HDFS
     if not self._fs:
-      raise HDFSerror("No HDFS found")
+      raise HDFSerror(_("No HDFS found"))
 
     # We want to set the user to be the user doing the upload
     self._fs.setuser(request.user.username)
     self._path = self._fs.mkswap(name, suffix='tmp', basedir=destination)
 
+    # Check access permissions before attempting upload
+    try:
+      self._fs.check_access(destination, 'rw-')
+    except WebHdfsException, e:
+      LOG.exception(e)
+      raise HDFSerror(_('User %s does not have permissions to write to path "%s".') % (request.user.username, destination))
+
     if self._fs.exists(self._path):
       self._fs._delete(self._path)
     self._file = self._fs.open(self._path, 'w')

+ 8 - 0
desktop/libs/hadoop/src/hadoop/fs/webhdfs.py

@@ -566,6 +566,14 @@ class WebHdfs(Hdfs):
       return self._root.get(path, params)
 
 
+  def check_access(self, path, aclspec='rw-'):
+    path = Hdfs.normpath(path)
+    params = self._getparams()
+    params['op'] = 'CHECKACCESS'
+    params['fsaction'] = aclspec
+    return self._root.get(path, params)
+
+
   def copyfile(self, src, dst, skip_header=False):
     sb = self._stats(src)
     if sb is None:
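check_access wraps the WebHDFS CHECKACCESS operation (GET <path>?op=CHECKACCESS&fsaction=<spec>): the NameNode answers with an empty body when the current user holds the requested fsaction, and with an error, surfaced as WebHdfsException, when it does not, which is what the new test above asserts. A minimal sketch of calling it directly, assuming hadoop.cluster.get_hdfs() returns the default HDFS client (a Hue helper assumed here, not shown in this diff):

  import hadoop.cluster
  from hadoop.fs.exceptions import WebHdfsException

  fs = hadoop.cluster.get_hdfs()  # assumed helper for the configured cluster

  fs.setuser('test')
  fs.check_access('/user/test', aclspec='rw-')  # returns '' when access is granted

  fs.setuser('nonadmin')
  try:
    fs.check_access('/user/test', aclspec='rw-')
  except WebHdfsException:
    pass  # the NameNode rejected the requested fsaction for this user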