
HUE-9197 [abfs] Add missing interface attributes

Light code formatting.
To avoid a 500 on a system with only abfs configured:

[17/Mar/2020 12:40:49 -0700] middleware   INFO     Processing exception: 'ABFS' object has no attribute 'supergroup': Traceback (most recent call last):
  File "/home/romain/projects/hue/build/env/lib/python3.6/site-packages/django/core/handlers/base.py", line 185, in _get_response
    response = wrapped_callback(request, *callback_args, **callback_kwargs)
  File "/usr/lib/python3.6/contextlib.py", line 52, in inner
    return func(*args, **kwds)
  File "/home/romain/projects/hue/apps/filebrowser/src/filebrowser/views.py", line 215, in view
    return listdir_paged(request, path)
  File "/home/romain/projects/hue/apps/filebrowser/src/filebrowser/views.py", line 528, in listdir_paged
    is_fs_superuser = _is_hdfs_superuser(request)
  File "/home/romain/projects/hue/apps/filebrowser/src/filebrowser/views.py", line 1437, in _is_hdfs_superuser
    return request.user.username == request.fs.superuser or request.user.groups.filter(name__exact=request.fs.supergroup).exists()
  File "/home/romain/projects/hue/desktop/core/src/desktop/lib/fs/proxyfs.py", line 56, in __getattr__
    return getattr(object.__getattribute__(self, "_default_fs"), item)
AttributeError: 'ABFS' object has no attribute 'supergroup'
Romain 5 years ago
parent commit 226a3a80fb
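
In short, ProxyFS forwards any attribute it does not define itself to the default filesystem (proxyfs.py line 56 in the traceback above), so when abfs is the only configured filesystem and ABFS exposes no superuser/supergroup, the filebrowser superuser check blows up into a 500. A minimal sketch of that delegation, using simplified stand-in classes rather than the real Hue ones:

    # Simplified stand-ins, not the actual Hue classes.
    class FakeABFS(object):
        pass  # like ABFS before this commit: no superuser / supergroup


    class FakeProxyFS(object):
        def __init__(self, default_fs):
            self._default_fs = default_fs

        def __getattr__(self, item):
            # Same delegation as desktop/core/src/desktop/lib/fs/proxyfs.py
            return getattr(object.__getattribute__(self, "_default_fs"), item)


    fs = FakeProxyFS(FakeABFS())

    try:
        fs.supergroup  # the lookup _is_hdfs_superuser() performs on request.fs
    except AttributeError as e:
        print(e)  # 'FakeABFS' object has no attribute 'supergroup'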

+ 37 - 23
desktop/libs/azure/src/azure/abfs/abfs.py

@@ -61,24 +61,28 @@ class ABFSFileSystemException(IOError):
 
 class ABFS(object):
 
-  def __init__(self, url,
-               fs_defaultfs,
-               logical_name=None,
-               hdfs_superuser=None,
-               security_enabled=False,
-               ssl_cert_ca_verify=True,
-               temp_dir="/tmp",
-               umask=0o1022,
-               hdfs_supergroup=None,
-               access_token=None,
-               token_type=None,
-               expiration=None):
+  def __init__(
+      self,
+      url,
+      fs_defaultfs,
+      logical_name=None,
+      hdfs_superuser=None,
+      security_enabled=False,
+      ssl_cert_ca_verify=True,
+      temp_dir="/tmp",
+      umask=0o1022,
+      hdfs_supergroup=None,
+      access_token=None,
+      token_type=None,
+      expiration=None
+    ):
     self._url = url
     self._superuser = hdfs_superuser
     self._security_enabled = security_enabled
     self._ssl_cert_ca_verify = ssl_cert_ca_verify
     self._temp_dir = temp_dir
     self._umask = umask
+    self.is_sentry_managed = lambda path: False
     self._fs_defaultfs = fs_defaultfs
     self._logical_name = logical_name
     self._supergroup = hdfs_supergroup
@@ -102,17 +106,19 @@ class ABFS(object):
   @classmethod
   def from_config(cls, hdfs_config, auth_provider):
     credentials = auth_provider.get_credentials()
-    return cls(url=hdfs_config.WEBHDFS_URL.get(),
-               fs_defaultfs=hdfs_config.FS_DEFAULTFS.get(),
-               logical_name=None,
-               security_enabled=False,
-               ssl_cert_ca_verify=False,
-               temp_dir=None,
-               umask=get_umask_mode(),
-               hdfs_supergroup=None,
-               access_token=credentials.get('access_token'),
-               token_type=credentials.get('token_type'),
-               expiration=int(credentials.get('expires_on')) * 1000 if credentials.get('expires_on') is not None else None)
+    return cls(
+        url=hdfs_config.WEBHDFS_URL.get(),
+        fs_defaultfs=hdfs_config.FS_DEFAULTFS.get(),
+        logical_name=None,
+        security_enabled=False,
+        ssl_cert_ca_verify=False,
+        temp_dir=None,
+        umask=get_umask_mode(),
+        hdfs_supergroup=None,
+        access_token=credentials.get('access_token'),
+        token_type=credentials.get('token_type'),
+        expiration=int(credentials.get('expires_on')) * 1000 if credentials.get('expires_on') is not None else None
+    )
 
   def get_client(self, url):
     return resource.Resource(http_client.HttpClient(url, exc_class=WebHdfsException, logger=LOG))
@@ -123,6 +129,14 @@ class ABFS(object):
       "x-ms-version" : "2019-02-02" #note this is required for setaccesscontrols
     }
 
+  @property
+  def superuser(self):
+    return self._superuser
+
+  @property
+  def supergroup(self):
+    return self._supergroup
+
   # Parse info about filesystems, directories, and files
   # --------------------------------
   def isdir(self, path):
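
With the superuser and supergroup properties added above (backed by the existing _superuser / _supergroup fields, which from_config() leaves as None), the same delegated lookup now resolves to None instead of raising, and _is_hdfs_superuser() simply returns False. A hedged sketch of the resulting behaviour, again using a simplified stand-in rather than the real ABFS:

    # Simplified stand-in illustrating the new properties.
    class FakeABFS(object):
        def __init__(self, hdfs_superuser=None, hdfs_supergroup=None):
            self._superuser = hdfs_superuser
            self._supergroup = hdfs_supergroup

        @property
        def superuser(self):
            return self._superuser

        @property
        def supergroup(self):
            return self._supergroup


    fs = FakeABFS()                  # from_config() passes no superuser/supergroup
    print('romain' == fs.superuser)  # False instead of an AttributeError
    print(fs.supergroup)             # None, so the groups name filter matches nothing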

+ 28 - 23
desktop/libs/azure/src/azure/adls/webhdfs.py

@@ -41,18 +41,20 @@ UPLOAD_CHUCK_SIZE = 30 * 1000 * 1000
 
 class WebHdfs(HadoopWebHdfs):
 
-  def __init__(self, url,
-               fs_defaultfs,
-               logical_name=None,
-               hdfs_superuser=None,
-               security_enabled=False,
-               ssl_cert_ca_verify=True,
-               temp_dir="/tmp",
-               umask=0o1022,
-               hdfs_supergroup=None,
-               access_token=None,
-               token_type=None,
-               expiration=None):
+  def __init__(
+      self, url,
+      fs_defaultfs,
+      logical_name=None,
+      hdfs_superuser=None,
+      security_enabled=False,
+      ssl_cert_ca_verify=True,
+      temp_dir="/tmp",
+      umask=0o1022,
+      hdfs_supergroup=None,
+      access_token=None,
+      token_type=None,
+      expiration=None
+    ):
     self._url = url
     self._superuser = hdfs_superuser
     self._security_enabled = security_enabled
@@ -84,17 +86,20 @@ class WebHdfs(HadoopWebHdfs):
     credentials = auth_provider.get_credentials()
     fs_defaultfs = get_default_adls_fs()
     url = get_default_adls_url()
-    return cls(url=url,
-               fs_defaultfs=fs_defaultfs,
-               logical_name=None,
-               security_enabled=False,
-               ssl_cert_ca_verify=False,
-               temp_dir=None,
-               umask=get_umask_mode(),
-               hdfs_supergroup=None,
-               access_token=credentials.get('access_token'),
-               token_type=credentials.get('token_type'),
-               expiration=int(credentials.get('expires_on')) * 1000 if credentials.get('expires_on')  is not None else None)
+
+    return cls(
+        url=url,
+        fs_defaultfs=fs_defaultfs,
+        logical_name=None,
+        security_enabled=False,
+        ssl_cert_ca_verify=False,
+        temp_dir=None,
+        umask=get_umask_mode(),
+        hdfs_supergroup=None,
+        access_token=credentials.get('access_token'),
+        token_type=credentials.get('token_type'),
+        expiration=int(credentials.get('expires_on')) * 1000 if credentials.get('expires_on') is not None else None
+    )
 
   def get_client(self, url):
     return resource.Resource(http_client.HttpClient(url, exc_class=WebHdfsException, logger=LOG))