
[core] Do not skip apps because HADOOP_BIN is not there

Hue used to skip loading apps when HADOOP_BIN is absent. This is undesirable
because most of the Hadoop interaction is done via web services.
bc Wong, 13 years ago
commit 871a084424
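
For orientation, a condensed sketch of the gate this commit removes, pieced together from the removed hunks below. has_hadoop, HADOOP_BIN, REQUIRES_HADOOP and determine_broken_apps are the real names from the diff; the sketch takes the app list as an argument, whereas the original worked on module globals.

    # Condensed sketch of the pre-commit behaviour; not a verbatim copy of the old modules.
    import os
    import hadoop.conf

    def has_hadoop():
      """True only when the configured Hadoop binary exists on local disk."""
      return os.path.isfile(hadoop.conf.HADOOP_BIN.get())

    def determine_broken_apps(desktop_apps):
      """Return the loaded apps that need Hadoop when no Hadoop binary is present."""
      if has_hadoop():
        return []
      # <app_module>.settings.REQUIRES_HADOOP defaults to True
      return [dmi for dmi in desktop_apps
              if dmi.settings is None or getattr(dmi.settings, 'REQUIRES_HADOOP', True)]

Apps flagged this way were reported by config_validator() and had their management commands refused in manage_entry. After this commit every app loads regardless, since, per the commit message, most Hadoop interaction goes through web services rather than the local binary.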

+ 1 - 18
desktop/core/src/desktop/appmanager.py

@@ -23,7 +23,6 @@ import traceback
 import pkg_resources
 
 import desktop
-import desktop.lib.apputil
 from desktop.lib.paths import get_desktop_root
 
 # Directories where apps and libraries are to be found
@@ -37,8 +36,7 @@ LOG = logging.getLogger(__name__)
 # Global variables set after calling load_apps()
 ######################################################################
 
-# List of DesktopModuleInfo that have been loaded and skipped
-BROKEN_APPS = None
+# List of DesktopModuleInfo that have been loaded
 DESKTOP_LIBS = None
 DESKTOP_APPS = None
 DESKTOP_MODULES = [ ]           # Sum of APPS and LIBS
@@ -222,21 +220,6 @@ def load_apps():
   LOG.debug("Loaded Desktop Applications: " + ", ".join(a.name for a in DESKTOP_APPS))
   DESKTOP_MODULES += DESKTOP_APPS
 
-def determine_broken_apps():
-  global DESKTOP_APPS
-  global BROKEN_APPS
-  BROKEN_APPS = []
-
-  hadoop_ok = desktop.lib.apputil.has_hadoop()
-  # If there is no hadoop installation, note which apps were loaded which
-  # require Hadoop.
-  if not hadoop_ok:
-    for dmi in DESKTOP_APPS:
-      app_settings = dmi.settings
-      # <app_module>.settings.REQUIRES_HADOOP is True by default
-      if app_settings is None or getattr(app_settings, 'REQUIRES_HADOOP', True):
-        LOG.warn('App %s requires Hadoop but Hadoop not present.' % (dmi,))
-        BROKEN_APPS.append(dmi)
 
 def get_desktop_module(name):
   """

+ 0 - 3
desktop/core/src/desktop/conf.py

@@ -407,9 +407,6 @@ def config_validator():
     res.extend(validate_path(KERBEROS.KINIT_PATH, is_dir=False))
     res.extend(validate_path(KERBEROS.CCACHE_PATH, is_dir=False))
 
-  for broken_app in appmanager.BROKEN_APPS:
-    res.append(('Working Hadoop', 'App %s requires Hadoop but Hadoop is not present.' % (broken_app,)))
-
   if LDAP.NT_DOMAIN.get() is not None or \
       LDAP.LDAP_USERNAME_PATTERN.get() is not None:
     if LDAP.LDAP_URL.get() is None:

+ 0 - 8
desktop/core/src/desktop/lib/apputil.py

@@ -21,14 +21,6 @@ import sys
 from django.conf import settings
 
 
-def has_hadoop():
-  """has_hadoop() -> bool   (Whether the Hadoop binary is installed)"""
-  # Do lazy import, since desktop.lib shouldn't depend on an sdk library (i.e. hadoop.conf)
-  # in general.
-  import hadoop.conf
-  return os.path.isfile(hadoop.conf.HADOOP_BIN.get())
-
-
 def get_current_app(frame=None):
   """
   Return the name of the app from INSTALLED_APPS that is most recently

+ 1 - 16
desktop/core/src/desktop/lib/fsmanager.py

@@ -16,9 +16,6 @@
 # limitations under the License.
 
 from hadoop.cluster import get_all_hdfs
-from desktop import conf
-from hadoop.fs import LocalSubFileSystem
-from desktop.lib.apputil import has_hadoop
 
 _filesystems = None
 
@@ -27,20 +24,8 @@ def _init_filesystems():
   global _filesystems
   if _filesystems is not None:
     return
-  _filesystems = {}
+  _filesystems = get_all_hdfs()
 
-  if has_hadoop():
-    # Load HDFSes
-    _filesystems.update(get_all_hdfs())
-
-  # Load local
-  for identifier in conf.LOCAL_FILESYSTEMS.keys():
-    local_fs = LocalSubFileSystem(
-        conf.LOCAL_FILESYSTEMS[identifier].PATH.get())
-    if identifier in _filesystems: 
-      raise Exception(("Filesystem '%s' configured twice. First is " +
-        "%s, second is local FS %s") % (identifier, _filesystems[identifier], local_fs))
-    _filesystems[identifier] = local_fs
 
 def get_filesystem(name):
   """Return the filesystem with the given name. If the filesystem is not defined,

+ 0 - 13
desktop/core/src/desktop/manage_entry.py

@@ -50,19 +50,6 @@ def entry():
   parser.parse_args(argv)
   if len(argv) > 1:
     prof_id = subcommand = argv[1]
-
-    # See if this command belongs to a disabled app
-    commands = { }
-    skipped_apps = sum([ app.django_apps for app in appmanager.BROKEN_APPS ], [])
-    for app_name in skipped_apps:
-      try:
-        path = find_management_module(app_name)
-        if subcommand in find_commands(path):
-          LOG.info("NOT starting the command '%s' from the disabled application '%s'. Exit." %
-                   (subcommand, app_name))
-          sys.exit(0)
-      except ImportError:
-        pass # No management module - ignore this app
   else:
     prof_id = str(os.getpid())
 

+ 3 - 5
desktop/core/src/desktop/middleware.py

@@ -120,8 +120,6 @@ class ClusterMiddleware(object):
     Sets request.fs and request.jt on every request to point to the
     configured filesystem.
     """
-    has_hadoop = apputil.has_hadoop()
-
     request.fs_ref = request.REQUEST.get('fs', view_kwargs.get('fs', 'default'))
     if "fs" in view_kwargs:
       del view_kwargs["fs"]
@@ -131,10 +129,10 @@ class ClusterMiddleware(object):
     except KeyError:
       raise KeyError('Cannot find HDFS called "%s"' % (request.fs_ref,))
 
-    if request.user.is_authenticated() and request.fs is not None:
-      request.fs.setuser(request.user.username)
+    if request.user.is_authenticated():
+      if request.fs is not None:
+        request.fs.setuser(request.user.username)
 
-    if request.user.is_authenticated() and has_hadoop:
       request.jt = cluster.get_default_mrcluster()
       if request.jt is not None:
         request.jt.setuser(request.user.username)
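
Read together, the two hunks above leave the tail of the ClusterMiddleware hook looking like this (post-image reconstructed from the diff; the enclosing method signature lies outside the visible context): request.fs and request.jt are now set for every authenticated request, with no has_hadoop() guard.

        # Fragment of the ClusterMiddleware hook after this commit (reconstructed).
        if request.user.is_authenticated():
          if request.fs is not None:
            request.fs.setuser(request.user.username)

          request.jt = cluster.get_default_mrcluster()
          if request.jt is not None:
            request.jt.setuser(request.user.username)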

+ 0 - 2
desktop/core/src/desktop/settings.py

@@ -189,8 +189,6 @@ _app_conf_modules.append(dict(module=desktop.conf, config_key=None))
 conf.initialize(_lib_conf_modules, _config_dir)
 conf.initialize(_app_conf_modules, _config_dir)
 
-appmanager.determine_broken_apps()
-
 # Now that we've loaded the desktop conf, set the django DEBUG mode based on the conf.
 DEBUG = desktop.conf.DJANGO_DEBUG_MODE.get()
 TEMPLATE_DEBUG = DEBUG