HUE-8908 [fb] First commit for ABFS (backend)

Jean-Francois Desjeans Gauthier committed 6 years ago
parent commit 9fe0447b75

+ 6 - 0
desktop/conf.dist/hue.ini

@@ -1701,6 +1701,12 @@
       ## fs_defaultfs=adl://<account_name>.azuredatalakestore.net
       ## webhdfs_url=https://<account_name>.azuredatalakestore.net/webhdfs/v1
 
+  [[abfs_clusters]]
+    # Default ABFS cluster
+    [[[default]]]
+      ## fs_defaultfs=abfss://<container_name>@<account_name>.dfs.core.windows.net
+      ## webhdfs_url=https://<container_name>@<account_name>.dfs.core.windows.net
+
 ###########################################################################
 # Settings for the Sentry lib
 ###########################################################################
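
For context, a filled-in version of the block above might look like the following. The storage account (examplestorage) and container (data) are illustrative placeholders; the OAuth client credentials themselves are read from the Azure accounts section (AZURE_ACCOUNTS in azure/conf.py), not from this block:

    [[abfs_clusters]]
      [[[default]]]
        fs_defaultfs=abfss://data@examplestorage.dfs.core.windows.net
        webhdfs_url=https://data@examplestorage.dfs.core.windows.net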

+ 6 - 0
desktop/conf/pseudo-distributed.ini.tmpl

@@ -1687,6 +1687,12 @@
       ## fs_defaultfs=adl://<account_name>.azuredatalakestore.net
       ## webhdfs_url=https://<account_name>.azuredatalakestore.net/webhdfs/v1
 
+  [[abfs_clusters]]
+    # Default ABFS cluster
+    [[[default]]]
+      ## fs_defaultfs=abfss://<container_name>@<account_name>.dfs.core.windows.net
+      ## webhdfs_url=https://<container_name>@<account_name>.dfs.core.windows.net
+
 ###########################################################################
 # Settings for the Sentry lib
 ###########################################################################

+ 5 - 1
desktop/core/src/desktop/lib/fsmanager.py

@@ -24,7 +24,7 @@ import aws
 import azure.client
 
 from aws.conf import is_enabled as is_s3_enabled
-from azure.conf import is_adls_enabled
+from azure.conf import is_adls_enabled, is_abfs_enabled
 from hadoop.cluster import get_hdfs
 from hadoop.conf import has_hdfs_enabled
 
@@ -49,6 +49,10 @@ if is_adls_enabled():
   FS_GETTERS['adl'] = azure.client.get_client
   if DEFAULT_SCHEMA is None:
       DEFAULT_SCHEMA = 'adl'
+if is_abfs_enabled():
+  FS_GETTERS['abfs'] = azure.client.get_client_abfs
+  if DEFAULT_SCHEMA is None:
+      DEFAULT_SCHEMA = 'abfs'
 
 
 def get_filesystem(name='default'):
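
The fsmanager hunk above registers one client getter per URI scheme and lets the first enabled filesystem become the default. A minimal standalone sketch of that dispatch pattern (the names here are illustrative, not Hue's actual module contents):

    FS_GETTERS = {}
    DEFAULT_SCHEMA = None

    def register(scheme, getter):
      # The first registered scheme becomes the default, as in the hunk above.
      global DEFAULT_SCHEMA
      FS_GETTERS[scheme] = getter
      if DEFAULT_SCHEMA is None:
        DEFAULT_SCHEMA = scheme

    def get_client_for(uri):
      # Dispatch on the URI scheme, e.g. 'abfs://data/dir' -> 'abfs'.
      scheme = uri.split('://', 1)[0] if '://' in uri else DEFAULT_SCHEMA
      return FS_GETTERS[scheme]()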

+ 15 - 0
desktop/libs/azure/src/azure/abfs/__init__.py

@@ -0,0 +1,15 @@
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 198 - 0
desktop/libs/azure/src/azure/abfs/abfs.py

@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Interfaces for ABFS
+"""
+import logging
+import threading
+
+from urlparse import urlparse
+from hadoop.hdfs_site import get_umask_mode
+
+from hadoop.fs.exceptions import WebHdfsException
+
+from desktop.lib.rest import http_client, resource
+
+
+LOG = logging.getLogger(__name__)
+
+# Azure has a 30 MB block limit on upload.
+UPLOAD_CHUCK_SIZE = 30 * 1000 * 1000
+PERMISSION_ACTION_ABFS = "abfs_access"
+
+
+class ABFS(object):
+
+  def __init__(self, url,
+               fs_defaultfs,
+               logical_name=None,
+               hdfs_superuser=None,
+               security_enabled=False,
+               ssl_cert_ca_verify=True,
+               temp_dir="/tmp",
+               umask=01022,
+               hdfs_supergroup=None,
+               auth_provider=None):
+    self._url = url
+    self._superuser = hdfs_superuser
+    self._security_enabled = security_enabled
+    self._ssl_cert_ca_verify = ssl_cert_ca_verify
+    self._temp_dir = temp_dir
+    self._umask = umask
+    self._fs_defaultfs = fs_defaultfs
+    self._logical_name = logical_name
+    self._supergroup = hdfs_supergroup
+    self._auth_provider = auth_provider
+    split = urlparse(fs_defaultfs)
+    self._scheme = split.scheme
+    self._netloc = split.netloc
+    self._is_remote = True
+    self._has_trash_support = False
+    self._filebrowser_action = PERMISSION_ACTION_ABFS
+
+    self._client = http_client.HttpClient(url, exc_class=WebHdfsException, logger=LOG)
+    self._root = resource.Resource(self._client)
+
+    # To store user info
+    self._thread_local = threading.local()
+
+    LOG.debug("Initializing ABFS : %s (security: %s, superuser: %s)" % (self._url, self._security_enabled, self._superuser))
+
+  @classmethod
+  def from_config(cls, hdfs_config, auth_provider):
+    return cls(url=hdfs_config.WEBHDFS_URL.get(),
+               fs_defaultfs=hdfs_config.FS_DEFAULTFS.get(),
+               logical_name=None,
+               security_enabled=False,
+               ssl_cert_ca_verify=False,
+               temp_dir=None,
+               umask=get_umask_mode(),
+               hdfs_supergroup=None,
+               auth_provider=auth_provider)
+
+  def _getheaders(self):
+    return {
+      "Authorization": self._auth_provider.get_token(),
+    }
+
+  def isdir(self, path):
+    raise NotImplementedError("")
+
+  def isfile(self, path):
+    raise NotImplementedError("")
+
+  def stats(self, path):
+    raise NotImplementedError("")
+
+  def listdir_stats(self, path, **kwargs):
+    raise NotImplementedError("")
+
+  def listdir(self, path, glob=None):
+    raise NotImplementedError("") # e.g. self._root.get('/', {'resource': 'account'}, self._getheaders())
+
+  def normpath(self, path):
+    raise NotImplementedError("")
+
+  def netnormpath(self, path):
+    raise NotImplementedError("")
+
+  def open(self, path, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def exists(self, path):
+    raise NotImplementedError("")
+
+  def isroot(self, path):
+    raise NotImplementedError("")
+
+  def parent_path(self, path):
+    raise NotImplementedError("")
+
+  def join(self, first, *comp_list):
+    raise NotImplementedError("")
+
+  def mkdir(self, path, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def read(self, path, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def append(self, path, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def rmtree(self, path, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def remove(self, path, skip_trash=False):
+    raise NotImplementedError("")
+
+  def restore(self, path):
+    raise NotImplementedError("")
+
+  def create(self, path, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def create_home_dir(self, home_path=None):
+    raise NotImplementedError("")
+
+  def chown(self, path, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def chmod(self, path, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def copyFromLocal(self, local_src, remote_dst, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def mktemp(self, subdir='', prefix='tmp', basedir=None):
+    raise NotImplementedError("")
+
+  def purge_trash(self):
+    raise NotImplementedError("")
+
+  # Handle file systems interactions
+  # --------------------------------
+  def copy(self, src, dst, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def copyfile(self, src, dst, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def copy_remote_dir(self, src, dst, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def rename(self, old, new):
+    raise NotImplementedError("")
+
+  def rename_star(self, old_dir, new_dir):
+    raise NotImplementedError("")
+
+  def upload(self, file, path, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def check_access(self, path, *args, **kwargs):
+    raise NotImplementedError("")
+
+  def mkswap(self, filename, subdir='', suffix='swp', basedir=None):
+    raise NotImplementedError("")
+  
+  def setuser(self, user):
+    self._user = user
+
+  def get_upload_chuck_size(self, path):
+    raise NotImplementedError("")
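
Every filesystem method above is stubbed out; the inline hint on listdir() points at the kind of REST call the implementation would wrap. A standalone sketch of that call against the ADLS Gen2 "Path - List" operation follows, using the public Azure REST parameters rather than anything defined in this commit (account, container, bearer token and API version are placeholders):

    import requests

    def list_paths(account, container, token, directory=''):
      # GET https://<account>.dfs.core.windows.net/<container>?resource=filesystem
      url = 'https://%s.dfs.core.windows.net/%s' % (account, container)
      params = {'resource': 'filesystem', 'recursive': 'false'}
      if directory:
        params['directory'] = directory
      headers = {'Authorization': token, 'x-ms-version': '2018-11-09'}
      resp = requests.get(url, params=params, headers=headers)
      resp.raise_for_status()
      # The service answers with a JSON body like {"paths": [{"name": ...}, ...]}
      return [p['name'] for p in resp.json().get('paths', [])]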

+ 46 - 0
desktop/libs/azure/src/azure/abfs/abfs_test.py

@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import
+
+import logging
+import unittest
+
+from azure.abfs.abfs import ABFS
+from azure.active_directory import ActiveDirectory
+from azure.conf import ABFS_CLUSTERS, is_abfs_enabled
+
+from nose.plugins.skip import SkipTest
+
+LOG = logging.getLogger(__name__)
+
+"""
+Interfaces for ADLS via HttpFs/WebHDFS
+"""
+class ABFSTestBase(unittest.TestCase):
+  integration = True
+
+  def setUp(self):
+    if not is_abfs_enabled():
+      raise SkipTest
+    self.client = ABFS.from_config(ABFS_CLUSTERS['default'], ActiveDirectory.from_config(None, version='v2.0'))
+
+  def tearDown(self):
+    pass
+
+  def test_list(self):
+    self.client.listdir('abfs://')
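
With ABFS unconfigured, setUp raises SkipTest and the case is skipped. Once a default cluster is configured, the integration test can be run through Hue's test entry point; the exact selector below is an assumption about the local checkout, not something this commit defines:

    build/env/bin/hue test specific azure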

+ 17 - 6
desktop/libs/azure/src/azure/active_directory.py

@@ -24,7 +24,7 @@ from desktop.lib.rest import http_client, resource
 LOG = logging.getLogger(__name__)
 
 class ActiveDirectory(object):
-  def __init__(self, url=None, aws_access_key_id=None, aws_secret_access_key=None):
+  def __init__(self, url=None, aws_access_key_id=None, aws_secret_access_key=None, version=None):
     self._access_key_id = aws_access_key_id
     self._secret_access_key = aws_secret_access_key
     self._url = url;
@@ -32,34 +32,45 @@ class ActiveDirectory(object):
     self._client = http_client.HttpClient(url, logger=LOG)
     self._root = resource.Resource(self._client)
     self._token = None
+    self._version = version
+
 
   def get_token(self):
+    if not self._version:
+      return self._get_token({"resource": "https://management.core.windows.net/"})
+    else:
+      return self._get_token({"scope": "https://storage.azure.com/.default"})
+
+
+  def _get_token(self, params=None):
     is_token_expired = self._token is None or time() >= self._token["expires_on"]
     if is_token_expired:
       LOG.debug("Authenticating to Azure Active Directory: %s" % self._url)
       data = {
         "grant_type" : "client_credentials",
-        "resource" : "https://management.core.windows.net/",
         "client_id" : self._access_key_id,
         "client_secret" : self._secret_access_key
       }
+      data.update(params)
       self._token = self._root.post("/", data=data, log_response=False);
-      self._token["expires_on"] = int(self._token["expires_on"])
+      self._token["expires_on"] = int(self._token.get("expires_on", self._token.get("expires_in")))
 
     return self._token["token_type"] + " " + self._token["access_token"]
 
+
   @classmethod
-  def from_config(cls, conf):
+  def from_config(cls, conf='default', version=None):
     access_key_id = AZURE_ACCOUNTS['default'].CLIENT_ID.get()
     secret_access_key = AZURE_ACCOUNTS['default'].CLIENT_SECRET.get()
 
     if None in (access_key_id, secret_access_key):
       raise ValueError('Can\'t create azure client, credential is not configured')
 
-    url = get_default_refresh_url()
+    url = get_default_refresh_url(version)
 
     return cls(
       url,
       aws_access_key_id=access_key_id,
-      aws_secret_access_key=secret_access_key
+      aws_secret_access_key=secret_access_key,
+      version=version
     )
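
The effect of the new version switch is easiest to see in the request bodies that _get_token() ends up posting. Both payloads below use placeholder credentials; the parameter split ('resource' for v1, 'scope' for v2.0) is taken from the hunk above:

    # Azure AD v1 token endpoint: the audience is passed as 'resource'.
    V1_DATA = {
      'grant_type': 'client_credentials',
      'client_id': 'example-client-id',    # placeholder
      'client_secret': 'example-secret',   # placeholder
      'resource': 'https://management.core.windows.net/',
    }

    # Azure AD v2.0 token endpoint: the audience is passed as 'scope'.
    V2_DATA = {
      'grant_type': 'client_credentials',
      'client_id': 'example-client-id',
      'client_secret': 'example-secret',
      'scope': 'https://storage.azure.com/.default',
    }

One caveat: v1 responses carry an absolute expires_on epoch timestamp, whereas v2.0 responses carry a relative expires_in lifetime in seconds, so the expires_on fallback above stores a small relative value that time() will always exceed; caching v2.0 tokens would need a conversion along the lines of time() + expires_in.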

+ 17 - 0
desktop/libs/azure/src/azure/client.py

@@ -20,6 +20,7 @@ import os
 
 from azure import conf
 from azure.adls.webhdfs import WebHdfs
+from azure.abfs.abfs import ABFS
 from azure.active_directory import ActiveDirectory
 
 LOG = logging.getLogger(__name__)
@@ -33,6 +34,13 @@ def get_client(identifier='default'):
     raise ValueError('Unknown azure client: %s, check your configuration' % identifier)
   return CLIENT_CACHE["adls"][identifier]
 
+def get_client_abfs(identifier='default'):
+  global CLIENT_CACHE
+  _init_clients()
+  if identifier not in CLIENT_CACHE["abfs"]:
+    raise ValueError('Unknown azure client: %s, check your configuration' % identifier)
+  return CLIENT_CACHE["abfs"][identifier]
+
 def _init_clients():
   global CLIENT_CACHE
   if CLIENT_CACHE is not None:
@@ -40,17 +48,26 @@ def _init_clients():
   CLIENT_CACHE = {}
   CLIENT_CACHE["azure"] = {}
   CLIENT_CACHE["adls"] = {}
+  CLIENT_CACHE["abfs"] = {}
   for identifier in conf.AZURE_ACCOUNTS.keys():
     CLIENT_CACHE["azure"][identifier] = _make_azure_client(identifier)
 
   for identifier in conf.ADLS_CLUSTERS.keys():
     CLIENT_CACHE["adls"][identifier] = _make_adls_client(identifier)
 
+  for identifier in conf.ABFS_CLUSTERS.keys():
+    CLIENT_CACHE["abfs"][identifier] = _make_abfs_client(identifier)
+
 def _make_adls_client(identifier):
   client_conf = conf.ADLS_CLUSTERS[identifier]
   azure_client = CLIENT_CACHE["azure"][identifier]
   return WebHdfs.from_config(client_conf, azure_client)
 
+def _make_abfs_client(identifier):
+  client_conf = conf.ABFS_CLUSTERS[identifier]
+  azure_client = CLIENT_CACHE["azure"][identifier]
+  return ABFS.from_config(client_conf, azure_client)
+
 def _make_azure_client(identifier):
   client_conf = conf.AZURE_ACCOUNTS[identifier]
   return ActiveDirectory.from_config(client_conf)
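
With the cache wired up, consumers fetch the shared client by identifier. Assuming the 'default' cluster from the configuration files above:

    from azure.client import get_client_abfs

    fs = get_client_abfs('default')  # one cached ABFS instance per identifier
    # In this commit every filesystem operation is still a stub, so e.g.
    # fs.listdir('abfs://') raises NotImplementedError.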

+ 31 - 5
desktop/libs/azure/src/azure/conf.py

@@ -25,7 +25,7 @@ from hadoop.core_site import get_adls_client_id, get_adls_authentication_code, g
 
 LOG = logging.getLogger(__name__)
 
-REFRESH_URL = 'https://login.microsoftonline.com/<tenant_id>/oauth2/token'
+REFRESH_URL = 'https://login.microsoftonline.com/<tenant_id>/oauth2/<version>token'
 
 
 def get_default_client_id():
@@ -49,8 +49,8 @@ def get_default_tenant_id():
   tenant_id_script = AZURE_ACCOUNTS['default'].TENANT_ID_SCRIPT.get()
   return tenant_id_script or get_adls_refresh_url()
 
-def get_default_refresh_url():
-  refresh_url = REFRESH_URL.replace('<tenant_id>', AZURE_ACCOUNTS['default'].TENANT_ID.get())
+def get_default_refresh_url(version):
+  refresh_url = REFRESH_URL.replace('<tenant_id>', AZURE_ACCOUNTS['default'].TENANT_ID.get()).replace('<version>', version + '/' if version else '')
   refresh_url = refresh_url if refresh_url else get_adls_refresh_url()
   return refresh_url or get_adls_refresh_url()
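
Expanding the templated REFRESH_URL makes the version handling concrete; my-tenant below is a placeholder tenant id:

    REFRESH_URL = 'https://login.microsoftonline.com/<tenant_id>/oauth2/<version>token'

    def expand(tenant_id, version=None):
      return (REFRESH_URL
              .replace('<tenant_id>', tenant_id)
              .replace('<version>', version + '/' if version else ''))

    expand('my-tenant')          # .../my-tenant/oauth2/token (v1)
    expand('my-tenant', 'v2.0')  # .../my-tenant/oauth2/v2.0/token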
 
@@ -63,6 +63,12 @@ def get_default_adls_url():
 def get_default_adls_fs():
   return ADLS_CLUSTERS['default'].FS_DEFAULTFS.get()
 
+def get_default_abfs_url():
+  return ABFS_CLUSTERS['default'].WEBHDFS_URL.get()
+
+def get_default_abfs_fs():
+  return ABFS_CLUSTERS['default'].FS_DEFAULTFS.get()
+
 ADLS_CLUSTERS = UnspecifiedConfigSection(
   "adls_clusters",
   help="One entry for each ADLS cluster",
@@ -121,20 +127,40 @@ AZURE_ACCOUNTS = UnspecifiedConfigSection(
   )
 )
 
+ABFS_CLUSTERS = UnspecifiedConfigSection(
+  "abfs_clusters",
+  help="One entry for each ABFS cluster",
+  each=ConfigSection(
+    help="Information about a single ABFS cluster",
+    members=dict(
+      FS_DEFAULTFS=Config("fs_defaultfs", help="abfss://<container_name>@<account_name>.dfs.core.windows.net", type=str, default=None),
+      WEBHDFS_URL=Config("webhdfs_url",
+                         help="https://<container_name>@<account_name>.dfs.core.windows.net",
+                         type=str, default=None),
+    )
+  )
+)
 
 def is_adls_enabled():
-  return ('default' in AZURE_ACCOUNTS.keys() and AZURE_ACCOUNTS['default'].get_raw() and AZURE_ACCOUNTS['default'].CLIENT_ID.get() is not None)
+  return ('default' in AZURE_ACCOUNTS.keys() and AZURE_ACCOUNTS['default'].get_raw() and AZURE_ACCOUNTS['default'].CLIENT_ID.get() is not None and 'default' in ADLS_CLUSTERS.keys() and ADLS_CLUSTERS['default'].get_raw())
+
+def is_abfs_enabled():
+  return ('default' in AZURE_ACCOUNTS.keys() and AZURE_ACCOUNTS['default'].get_raw() and AZURE_ACCOUNTS['default'].CLIENT_ID.get() is not None and 'default' in ABFS_CLUSTERS.keys() and ABFS_CLUSTERS['default'].get_raw())
 
 def has_adls_access(user):
   from desktop.auth.backend import is_admin
   return user.is_authenticated() and user.is_active and (is_admin(user) or user.has_hue_permission(action="adls_access", app="filebrowser"))
 
+def has_abfs_access(user):
+  from desktop.auth.backend import is_admin
+  return user.is_authenticated() and user.is_active and (is_admin(user) or user.has_hue_permission(action="abfs_access", app="filebrowser"))
+
 def config_validator(user):
   res = []
 
   import azure.client # Avoid cyclic loop
 
-  if is_adls_enabled():
+  if is_adls_enabled() or is_abfs_enabled():
     try:
       headers = azure.client.get_client('default')._getheaders()
       if not headers.get('Authorization'):