Browse source

HUE-8925 [fb] Fix _parse_core_site when no HDFS_CLUSTERS

Jean-Francois Desjeans Gauthier 6 years ago
parent
commit
a55b18197f

+ 1 - 1
desktop/core/src/desktop/lib/fs/proxyfs.py

@@ -37,7 +37,7 @@ class ProxyFS(object):
     self._fs_dict = filesystems_dict
     self._user = {'user': None} # wrapping in an object to avoid triggering __getattr__ / __setattr__
     self._default_scheme = default_scheme
-    self._default_fs = filesystems_dict[self._default_scheme](name, default_scheme)
+    self._default_fs = filesystems_dict[self._default_scheme](name)
 
   def __getattr__(self, item):
     return getattr(object.__getattribute__(self, "_default_fs"), item)
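
For context, ProxyFS forwards any attribute it does not define to the default filesystem via __getattr__, so only the constructor call needed fixing here. A minimal, self-contained sketch of that delegation pattern (class names are hypothetical, not Hue's):

import os

class Proxy(object):
  def __init__(self, default_fs):
    # Bypass our own attribute machinery when storing the wrapped object
    object.__setattr__(self, '_default_fs', default_fs)

  def __getattr__(self, item):
    # Only called when normal lookup fails; delegate to the wrapped filesystem
    return getattr(object.__getattribute__(self, '_default_fs'), item)

class FakeFS(object):
  def listdir(self, path):
    return ['a', 'b']

print(Proxy(FakeFS()).listdir('/'))  # ['a', 'b']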

+ 13 - 3
desktop/core/src/desktop/lib/paths.py

@@ -89,11 +89,21 @@ def get_run_root(*append):
   return __get_root(*append)
 
 
+def get_hadoop_conf_dir_default_config():
+  from hadoop.conf import HDFS_CLUSTERS, get_hadoop_conf_dir_default
+  if HDFS_CLUSTERS.keys():
+    yarn_site_path = HDFS_CLUSTERS[HDFS_CLUSTERS.keys()[0]].HADOOP_CONF_DIR.get()
+  else:
+    yarn_site_path = get_hadoop_conf_dir_default()
+  return yarn_site_path
+
+
 def get_config_root(*append):
   """
   Currently gets it based on the Hadoop configuration location.
   """
-  from hadoop.conf import HDFS_CLUSTERS
+  return os.path.abspath(os.path.join(get_hadoop_conf_dir_default_config(), '..', *append))
+
 
-  yarn_site_path = HDFS_CLUSTERS['default'].HADOOP_CONF_DIR.get()
-  return os.path.abspath(os.path.join(yarn_site_path, '..', *append))
+def get_config_root_hadoop(*append):
+  return os.path.abspath(os.path.join(get_hadoop_conf_dir_default_config(), *append))
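
The new helper prefers the first configured HDFS cluster's hadoop_conf_dir and only falls back to the environment default when no cluster is configured, which is what fixes the no-HDFS_CLUSTERS case (the old code hard-coded the 'default' key and could fail when no such section existed). A standalone sketch of that fallback, using a plain dict as a hypothetical stand-in for the HDFS_CLUSTERS config section:

import os

def conf_dir_with_fallback(hdfs_clusters):
  # First configured cluster wins; otherwise use the environment default
  names = list(hdfs_clusters.keys())
  if names:
    return hdfs_clusters[names[0]]['hadoop_conf_dir']
  return os.environ.get('HADOOP_CONF_DIR', '/etc/hadoop/conf')

print(conf_dir_with_fallback({}))  # '/etc/hadoop/conf' unless HADOOP_CONF_DIR is set
print(conf_dir_with_fallback({'default': {'hadoop_conf_dir': '/opt/conf'}}))  # '/opt/conf'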

+ 6 - 7
desktop/libs/aws/src/aws/client.py

@@ -21,15 +21,13 @@ import datetime
 import logging
 import os
 
-import boto
-import boto.s3
 import boto.s3.connection
-import boto.utils
 
 from aws import conf as aws_conf
 from aws.s3.s3fs import S3FileSystemException
 from aws.s3.s3fs import S3FileSystem
 
+from desktop.conf import DEFAULT_USER
 from desktop.lib.idbroker import conf as conf_idbroker
 from desktop.lib.idbroker.client import IDBroker

@@ -39,9 +37,10 @@ HTTP_SOCKET_TIMEOUT_S = 60
 
 CLIENT_CACHE = None
 
+_DEFAULT_USER = DEFAULT_USER.get()
 
 # FIXME: Should we check hue principal for the default user?
-def _get_cache_key(identifier='default', user='HUE'): # FIXME: Caching via username has issues when users get deleted. Need to switch to userid, but bigger change
+def _get_cache_key(identifier='default', user=_DEFAULT_USER): # FIXME: Caching via username has issues when users get deleted. Need to switch to userid, but bigger change
   return identifier + ':' + user

@@ -54,7 +53,7 @@ def current_ms_from_utc():
   return (datetime.datetime.utcnow() - datetime.datetime.utcfromtimestamp(0)).total_seconds() * 1000
 
 
-def get_client(identifier='default', user=None):
+def get_client(identifier='default', user=_DEFAULT_USER):
   global CLIENT_CACHE
   _init_clients()

@@ -68,7 +67,7 @@ def get_client(identifier='default', user=None):
     CLIENT_CACHE[cache_key] = client
     return client
 
-def get_credential_provider(identifier='default', user=None):
+def get_credential_provider(identifier='default', user=_DEFAULT_USER):
   client_conf = aws_conf.AWS_ACCOUNTS[identifier] if identifier in aws_conf.AWS_ACCOUNTS else None
   return CredentialProviderIDBroker(IDBroker.from_core_site('s3a', user)) if conf_idbroker.is_idbroker_enabled('s3a') else CredentialProviderConf(client_conf)

@@ -87,7 +86,7 @@ def _init_clients():
     CLIENT_CACHE[_get_cache_key()] = _make_client('default')
 
 
-def _make_client(identifier, user=None):
+def _make_client(identifier, user=_DEFAULT_USER):
   client_conf = aws_conf.AWS_ACCOUNTS[identifier] if identifier in aws_conf.AWS_ACCOUNTS else None
 
   client = Client.from_config(client_conf, get_credential_provider(identifier, user))
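
These changes replace the hard-coded 'HUE' / None defaults with the default user from Hue's configuration, resolved once at import time. A small sketch of the identifier:user cache-key scheme used above, with a hard-coded stand-in for DEFAULT_USER.get():

_DEFAULT_USER = 'hue'  # stand-in for DEFAULT_USER.get()

def _get_cache_key(identifier='default', user=_DEFAULT_USER):
  return identifier + ':' + user

CLIENT_CACHE = {}
CLIENT_CACHE[_get_cache_key()] = 's3-client'  # stored under 'default:hue'
print(_get_cache_key('finance', 'alice'))     # 'finance:alice'

Note that Python evaluates default argument values once, at function definition time, so user=_DEFAULT_USER captures whatever DEFAULT_USER.get() returned when the module was imported.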

+ 5 - 1
desktop/libs/hadoop/src/hadoop/conf.py

@@ -57,6 +57,10 @@ UPLOAD_CHUNK_SIZE = Config(
 def has_hdfs_enabled():
   return HDFS_CLUSTERS.keys()
 
+def get_hadoop_conf_dir_default():
+  """ get from environment variable HADOOP_CONF_DIR or "/etc/hadoop/conf" """
+  return os.environ.get("HADOOP_CONF_DIR", "/etc/hadoop/conf")
+
 
 HDFS_CLUSTERS = UnspecifiedConfigSection(
   "hdfs_clusters",

@@ -86,7 +90,7 @@ HDFS_CLUSTERS = UnspecifiedConfigSection(
                       default='/tmp', type=str),
       HADOOP_CONF_DIR = Config(
         key="hadoop_conf_dir",
-        default=os.environ.get("HADOOP_CONF_DIR", "/etc/hadoop/conf"),
+        dynamic_default=get_hadoop_conf_dir_default,
         help=("Directory of the Hadoop configuration) Defaults to the environment variable " +
               "HADOOP_CONF_DIR when set, or '/etc/hadoop/conf'.")
       )
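
Switching from default=os.environ.get(...) to dynamic_default=get_hadoop_conf_dir_default moves the environment lookup from module-import time to access time. A toy model of the difference (hypothetical Config class; Hue's desktop.lib.conf.Config is more involved):

import os

class Config(object):
  def __init__(self, default=None, dynamic_default=None):
    self._default = default
    self._dynamic_default = dynamic_default

  def get(self):
    if self._dynamic_default is not None:
      return self._dynamic_default()  # re-evaluated on every access
    return self._default              # frozen when the Config was defined

os.environ['HADOOP_CONF_DIR'] = '/etc/hadoop/conf'
static = Config(default=os.environ.get('HADOOP_CONF_DIR'))
dynamic = Config(dynamic_default=lambda: os.environ.get('HADOOP_CONF_DIR'))

os.environ['HADOOP_CONF_DIR'] = '/opt/hadoop/conf'
print(static.get())   # '/etc/hadoop/conf' -- stale
print(dynamic.get())  # '/opt/hadoop/conf' -- current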

+ 11 - 15
desktop/libs/hadoop/src/hadoop/core_site.py

@@ -17,11 +17,11 @@
 
 import errno
 import logging
-import os.path
 
-import conf
 import confparse
 
+from desktop.lib.paths import get_config_root_hadoop
+
 __all = ['get_conf', 'get_trash_interval', 'get_s3a_access_key', 'get_s3a_secret_key']
 
 LOG = logging.getLogger(__name__)

@@ -62,19 +62,15 @@ def _parse_core_site():
   global _CORE_SITE_DICT
   global _CORE_SITE_PATH
 
-  for indentifier in conf.HDFS_CLUSTERS.get():
-    try:
-      _CORE_SITE_PATH = os.path.join(conf.HDFS_CLUSTERS[indentifier].HADOOP_CONF_DIR.get(), 'core-site.xml') # Will KeyError and be empty as HADOOP_CONF_DIR does not exist anymore
-      data = file(_CORE_SITE_PATH, 'r').read()
-      break
-    except KeyError:
-      data = ""
-    except IOError, err:
-      if err.errno != errno.ENOENT:
-        LOG.error('Cannot read from "%s": %s' % (_CORE_SITE_PATH, err))
-        return
-      # Keep going and make an empty ConfParse
-      data = ""
+  try:
+    _CORE_SITE_PATH = get_config_root_hadoop('core-site.xml')
+    data = file(_CORE_SITE_PATH, 'r').read()
+  except IOError, err:
+    if err.errno != errno.ENOENT:
+      LOG.error('Cannot read from "%s": %s' % (_CORE_SITE_PATH, err))
+      return
+    # Keep going and make an empty ConfParse
+    data = ""
 
   _CORE_SITE_DICT = confparse.ConfParse(data)
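
The rewritten _parse_core_site no longer loops over HDFS_CLUSTERS, so it also works when none are configured: the path always resolves through get_config_root_hadoop, and a missing file simply yields an empty config. A sketch of that read-with-fallback pattern in modern syntax (the tree above is Python 2, hence file(...) and "except IOError, err"); read_core_site is a hypothetical name:

import errno

def read_core_site(path):
  try:
    with open(path) as f:
      return f.read()
  except IOError as err:
    if err.errno != errno.ENOENT:
      raise  # a genuine read failure should surface
    return ""  # missing file: fall back to an empty document

print(repr(read_core_site('/nonexistent/core-site.xml')))  # ''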