
[core] Remove old Hadoop configs

Removed HDFS trash info
Tested with trash disabled
Romain Rigaux, 12 years ago
Parent commit: c51589fcca

+ 3 - 20
apps/about/src/about/templates/admin_wizard.mako

@@ -30,15 +30,15 @@ ${ header.menubar() }
         % if user.is_superuser:
           ${ _('Quick Start Wizard') } -
         % endif
-        Hue™ ${version} - The Hadoop UI
+        Hue&trade; ${version} - <a href="http://gethue.com" target="_blank" style="color:#777" title="${ _('Visit the project website!') }">The Hadoop UI</a>
       </h2>
 
      % if user.is_superuser:
 
       <div class="card-body">
-            <br/>
+        <br/>
 
-          <div class="row-fluid">
+         <div class="row-fluid">
           <div id="properties" class="section">
             <ul class="nav nav-tabs" style="margin-bottom: 0">
               <li class="active"><a href="#step1" class="step">${ _('Step 1:') } <i
@@ -62,23 +62,6 @@ ${ header.menubar() }
               </div>
             </div>
 
-          <div class="card card-home card-tab card-tab-bordertop">
-            <h2 class="card-heading simple">${ _('HDFS Trash Configuration') }</h2>
-          <div class="card-body">
-          <p>
-            % if trash_enabled:
-            <h5>${ _('Trash is active.')}</h5>
-            % else:
-            ${ _('You can activate trash collection by setting fs.trash.interval in core-site.xml:')}<br/><br/>
-            <pre>
-  &#60;property&#62;
-    &#60;name&#62;fs.trash.interval&#60;/name&#62;
-    &#60;value&#62;10060&#60;/value&#62;
-  &#60;/property&#62;</pre>
-            % endif
-          </p>
-          </div>
-          </div>
           </div>
 
           <div id="step2" class="stepDetails hide">

+ 0 - 4
apps/about/src/about/views.py

@@ -26,22 +26,18 @@ from desktop.lib.django_util import render
 from desktop.models import Settings
 from desktop import appmanager
 
-from hadoop.core_site import get_trash_interval
-
 
 def admin_wizard(request):
   apps = appmanager.get_apps(request.user)
   app_names = [app.name for app in sorted(apps, key=lambda app: app.menu_index)]
 
   tours_and_tutorials = Settings.get_settings().tours_and_tutorials
-  trash_enabled = get_trash_interval() > 0
 
   return render('admin_wizard.mako', request, {
       'version': settings.HUE_DESKTOP_VERSION,
       'apps': dict([(app.name, app) for app in apps]),
       'app_names': app_names,
       'tours_and_tutorials': tours_and_tutorials,
-      'trash_enabled': trash_enabled
   })
 
 

+ 1 - 8
apps/filebrowser/src/filebrowser/templates/fb_components.mako

@@ -31,7 +31,7 @@ from django.utils.translation import ugettext as _
             <input id="hueBreadcrumbText" type="text" class="input-xxlarge" style="margin-top:4px;margin-right:4px;display:none" data-bind="value: currentPath" />
         </li>
         <li class="pull-right">
-          <a href="${url('filebrowser.views.view', path=urlencode(path))}?default_to_trash" style="line-height:18px" data-bind="visible: trashEnabled" title="${_('View trash')}">
+          <a href="${url('filebrowser.views.view', path=urlencode(path))}?default_to_trash" style="line-height:18px" title="${_('View trash')}">
             <i class="fa fa-trash-o"></i> ${_('View trash')}
           </a>
         </li>
@@ -51,11 +51,6 @@ from django.utils.translation import ugettext as _
             </ul>
         </li>
         % endif
-        % if not trash_enabled and is_superuser:
-        <li id="trash-help" class="pull-right" style="width:30px;text-align:right;line-height:35px;">
-            <i class='fa fa-question-circle'></i>
-        </li>
-        % endif
     </ul>
 </%def>
 
@@ -78,5 +73,3 @@ from django.utils.translation import ugettext as _
       </div>
   </div>
 </%def>
-
-

+ 0 - 5
apps/filebrowser/src/filebrowser/templates/listdir.mako

@@ -58,7 +58,6 @@ ${ fb_components.menubar() }
             <button class="btn fileToolbarBtn" title="${_('Download')}" data-bind="visible: !inTrash(), click: downloadFile, enable: selectedFiles().length == 1 && selectedFile().type == 'file'"><i class="fa fa-arrow-circle-o-down"></i> ${_('Download')}</button>
             <button class="btn fileToolbarBtn" title="${_('Restore from trash')}" data-bind="visible: inRestorableTrash(), click: restoreTrashSelected, enable: selectedFiles().length > 0"><i class="fa fa-cloud-upload"></i> ${_('Restore')}</button>
             <!-- ko ifnot: inTrash -->
-              <!-- ko if: trashEnabled -->
                 <div id="delete-dropdown" class="btn-group" style="vertical-align: middle">
                   <button id="trash-btn" class="btn toolbarBtn" data-bind="enable: selectedFiles().length > 0, click: trashSelected"><i class="fa fa-times"></i> ${_('Move to trash')}</button>
                   <button id="trash-btn-caret" class="btn toolbarBtn dropdown-toggle" data-toggle="dropdown" data-bind="enable: selectedFiles().length > 0">
@@ -68,10 +67,6 @@ ${ fb_components.menubar() }
                     <li><a href="#" class="delete-link" title="${_('Delete forever')}" data-bind="enable: selectedFiles().length > 0, click: deleteSelected"><i class="fa fa-bolt"></i> ${_('Delete forever')}</a></li>
                   </ul>
                 </div>
-              <!-- /ko -->
-            <!-- /ko -->
-            <!-- ko ifnot: trashEnabled -->
-              <button class="btn fileToolbarBtn delete-link" title="${_('Delete forever')}" data-bind="enable: selectedFiles().length > 0, click: deleteSelected"><i class="fa fa-bolt"></i> ${_('Delete forever')}</button>
             <!-- /ko -->
             <button class="btn fileToolbarBtn" title="${_('Submit')}"
               data-bind="visible: selectedFiles().length == 1 && $.inArray(selectedFile().name, ['workflow.xml', 'coordinator.xml', 'bundle.xml']) > -1, click: submitSelected">

+ 2 - 12
apps/filebrowser/src/filebrowser/templates/listdir_components.mako

@@ -771,15 +771,6 @@ from django.utils.translation import ugettext as _
         $("#editBreadcrumb").show();
       });
 
-      % if not trash_enabled and is_superuser:
-        $("#trash-help").popover({
-            'title': "${_('Did you know?')}",
-            'content': '${_('You can activate HDFS trash by setting fs.trash.interval in core-site.xml.')}',
-            'trigger': 'hover',
-            'html': true,
-            'placement': 'left'
-        });
-      % endif
       $.ajaxSetup({
         error:function (x, e) {
           if (x.status == 500) {
@@ -916,7 +907,6 @@ from django.utils.translation import ugettext as _
       self.recordsPerPage = ko.observable($.cookie("hueFilebrowserRecordsPerPage"));
       self.targetPageNum = ko.observable(1);
       self.targetPath = ko.observable("${current_request_path}");
-      self.trashEnabled = ko.observable(${ trash_enabled and "true" or "false" });
 
       self.sortBy = ko.observable("name");
       self.sortDescending = ko.observable(false);
@@ -969,11 +959,11 @@ from django.utils.translation import ugettext as _
       self.currentPath = ko.observable(currentDirPath);
 
       self.inTrash = ko.computed(function() {
-        return self.currentPath().match(/^\/user\/.+?\/\.Trash/) && self.trashEnabled();
+        return self.currentPath().match(/^\/user\/.+?\/\.Trash/);
       });
 
       self.inRestorableTrash = ko.computed(function() {
-        return self.currentPath().match(/^\/user\/.+?\/\.Trash\/.+?/) && self.trashEnabled();
+        return self.currentPath().match(/^\/user\/.+?\/\.Trash\/.+?/);
       });
 
       self.getStats = function (callback) {

+ 1 - 9
apps/filebrowser/src/filebrowser/views.py

@@ -22,6 +22,7 @@
 
 import errno
 import logging
+import json
 import mimetypes
 import operator
 import posixpath
@@ -30,11 +31,6 @@ import shutil
 import stat as stat_module
 import os
 
-try:
-  import json
-except ImportError:
-  import simplejson as json
-
 from datetime import datetime
 
 from django.contrib import messages
@@ -65,7 +61,6 @@ from filebrowser.lib import xxd
 from filebrowser.forms import RenameForm, UploadFileForm, UploadArchiveForm, MkDirForm, EditorForm, TouchForm,\
                               RenameFormSet, RmTreeFormSet, ChmodFormSet, ChownFormSet, CopyFormSet, RestoreFormSet,\
                               TrashPurgeForm
-from hadoop.core_site import get_trash_interval
 from hadoop.fs.hadoopfs import Hdfs
 from hadoop.fs.exceptions import WebHdfsException
 
@@ -425,8 +420,6 @@ def listdir_paged(request, path):
     if not request.fs.isdir(path):
         raise PopupException("Not a directory: %s" % (path,))
 
-    trash_enabled = get_trash_interval()
-
     pagenum = int(request.GET.get('pagenum', 1))
     pagesize = int(request.GET.get('pagesize', 30))
 
@@ -488,7 +481,6 @@ def listdir_paged(request, path):
         'page': _massage_page(page),
         'pagesize': pagesize,
         'home_directory': request.fs.isdir(home_dir_path) and home_dir_path or None,
-        'trash_enabled': trash_enabled,
         'sortby': sortby,
         'descending': descending_param,
         # The following should probably be deprecated

+ 0 - 36
desktop/conf.dist/hue.ini

@@ -327,18 +327,6 @@
       # Change this if your HDFS cluster is Kerberos-secured
       ## security_enabled=false
 
-      # Settings about this HDFS cluster. If you install HDFS in a
-      # different location, you need to set the following.
-
-      # Defaults to $HADOOP_HDFS_HOME or /usr/lib/hadoop-hdfs
-      ## hadoop_hdfs_home=/usr/lib/hadoop-hdfs
-
-      # Defaults to $HADOOP_BIN or /usr/bin/hadoop
-      ## hadoop_bin=/usr/bin/hadoop
-
-      # Defaults to $HADOOP_CONF_DIR or /etc/hadoop/conf
-      ## hadoop_conf_dir=/etc/hadoop/conf
-
   # Configuration for YARN (MR2)
   # ------------------------------------------------------------------------
   [[yarn_clusters]]
@@ -356,18 +344,6 @@
       # Change this if your YARN cluster is Kerberos-secured
       ## security_enabled=false
 
-      # Settings about this MR2 cluster. If you install MR2 in a
-      # different location, you need to set the following.
-
-      # Defaults to $HADOOP_MR2_HOME or /usr/lib/hadoop-mapreduce
-      ## hadoop_mapred_home=/usr/lib/hadoop-mapreduce
-
-      # Defaults to $HADOOP_BIN or /usr/bin/hadoop
-      ## hadoop_bin=/usr/bin/hadoop
-
-      # Defaults to $HADOOP_CONF_DIR or /etc/hadoop/conf
-      ## hadoop_conf_dir=/etc/hadoop/conf
-
       # URL of the ResourceManager API
       ## resourcemanager_api_url=http://localhost:8088
 
@@ -400,18 +376,6 @@
       # Change this if your MapReduce cluster is Kerberos-secured
       ## security_enabled=false
 
-      # Settings about this MR1 cluster. If you install MR1 in a
-      # different location, you need to set the following.
-
-      # Defaults to $HADOOP_MR1_HOME or /usr/lib/hadoop-0.20-mapreduce
-      ## hadoop_mapred_home=/usr/lib/hadoop-0.20-mapreduce
-
-      # Defaults to $HADOOP_BIN or /usr/bin/hadoop
-      ## hadoop_bin=/usr/bin/hadoop
-
-      # Defaults to $HADOOP_CONF_DIR or /etc/hadoop/conf
-      ## hadoop_conf_dir=/etc/hadoop/conf
-
     # HA support by specifying multiple clusters
     # e.g.
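With the commented-out hadoop_hdfs_home, hadoop_bin and hadoop_conf_dir entries removed, only the connection-level settings remain per cluster. A minimal sketch of reading the surviving keys from Python, assuming the 'default' cluster name used elsewhere in this commit (key names come from the hadoop/conf.py and pseudo_hdfs4.py hunks below):

    from hadoop import conf

    # Hedged sketch: values fall back to the hue.ini defaults when unset.
    fs_defaultfs = conf.HDFS_CLUSTERS['default'].FS_DEFAULTFS.get()
    webhdfs_url = conf.HDFS_CLUSTERS['default'].WEBHDFS_URL.get()
    secure = conf.HDFS_CLUSTERS['default'].SECURITY_ENABLED.get()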
 

+ 1 - 2
desktop/core/src/desktop/management/commands/test_windmill.py

@@ -21,7 +21,6 @@ after appropriate setup.
 import sys
 import time
 from optparse import make_option
-from hadoop import mini_cluster
 
 from django.core.management.base import BaseCommand
 from windmill.authoring import djangotest
@@ -73,7 +72,7 @@ class Command(BaseCommand):
     This currently doesn't start app-specific
     other servers.
     """
-    self.cluster = mini_cluster.shared_cluster(conf=True)
+    pass
 
   def stop_helper_servers(self):
     self.cluster.shutdown()

+ 6 - 101
desktop/libs/hadoop/src/hadoop/conf.py

@@ -16,7 +16,7 @@
 # limitations under the License.
 
 from django.utils.translation import ugettext_lazy as _t
-from desktop.lib.conf import Config, UnspecifiedConfigSection, ConfigSection, validate_path, coerce_bool
+from desktop.lib.conf import Config, UnspecifiedConfigSection, ConfigSection, coerce_bool
 import fnmatch
 import logging
 import os
@@ -39,26 +39,6 @@ def find_file_recursive(desired_glob, root):
   f.__doc__ = "Finds %s/%s" % (root, desired_glob)
   return f
 
-HADOOP_PLUGIN_CLASSPATH = Config("hadoop_plugin_classpath",
-  help="[Used only in testing code.] Path to the Hadoop plugin jar.",
-  type=str,
-  dynamic_default=find_file_recursive("hue-plugins-*.jar",
-                root=os.path.join(os.path.dirname(__file__), '..', '..', 'java-lib')),
-  private=True)
-
-SUDO_SHELL_JAR = Config("hadoop_sudo_shell_jar",
-  help="Tool that allows a proxy user UGI to be used to upload files.",
-  type=str,
-  dynamic_default=find_file_recursive("sudo-shell-*.jar",
-                root=os.path.join(os.path.dirname(__file__), '..', '..', 'sudo-shell', 'java-lib')),
-  private=True)
-
-CREDENTIALS_MERGER_JAR = Config("hadoop_credentials_merger_jar",
-  help="Tool that is capable of merging multiple files containing delegation tokens into one.",
-  type=str,
-  dynamic_default=find_file_recursive("credentials-merger-*.jar",
-                root=os.path.join(os.path.dirname(__file__), '..', '..', 'credentials-merger', 'java-lib')),
-  private=True)
 
 UPLOAD_CHUNK_SIZE = Config(
   key="upload_chunk_size",
@@ -89,31 +69,6 @@ HDFS_CLUSTERS = UnspecifiedConfigSection(
                               default=False, type=coerce_bool),
       TEMP_DIR=Config("temp_dir", help="HDFS directory for temporary files",
                       default='/tmp', type=str),
-
-      HADOOP_HDFS_HOME = Config(
-        key="hadoop_hdfs_home",
-        default=os.environ.get("HADOOP_HDFS_HOME", "/usr/lib/hadoop-hdfs"),
-        help=("Path to Hadoop HDFS home - HADOOP_HOME or HADOOP_HDFS_HOME in " +
-              "hadoop parlance. For tarball installations, it is the root of " +
-              "the untarred directory. For packages, " +
-              "it is /usr/lib/hadoop-hdfs." +
-              "Defaults to the environment varible HADOOP_BIN when set, " +
-              "or '/usr/bin/hadoop'."),
-      ),
-      HADOOP_BIN = Config(
-        key="hadoop_bin",
-        default=os.environ.get("HADOOP_BIN", "/usr/bin/hadoop"),
-        help=("Path to your Hadoop launcher script. E.g. /usr/bin/hadoop. " +
-              "Defaults to the environment varible HADOOP_BIN when set, " +
-              "or '/usr/bin/hadoop'.")
-      ),
-      HADOOP_CONF_DIR = Config(
-        key="hadoop_conf_dir",
-        default=os.environ.get("HADOOP_CONF_DIR", "/etc/hadoop/conf"),
-        help=("Directory to pass to hadoop_bin (from Hadoop configuration) " +
-              "as the --config flag. Defaults to the environment variable " +
-              "HADOOP_CONF_DIR when set, or '/etc/hadoop/conf'.")
-      ),
     )
   )
 )
@@ -140,29 +95,7 @@ MR_CLUSTERS = UnspecifiedConfigSection(
       SECURITY_ENABLED=Config("security_enabled", help="Is running with Kerberos authentication",
                               default=False, type=coerce_bool),
       SUBMIT_TO=Config('submit_to', help="Whether Hue should use this cluster to run jobs",
-                       default=True, type=coerce_bool), # Backward compatibility
-
-      HADOOP_MAPRED_HOME = Config(
-        key="hadoop_mapred_home",
-        default=os.environ.get("HADOOP_MR1_HOME", "/usr/lib/hadoop-0.20-mapreduce"),
-        help=("Path to directory holding Hadoop MR1 libs. " +
-              "E.g. /usr/lib/hadoop. Defaults to the environment variable " +
-              "HADOOP_MR1_HOME when set, or '/usr/lib/hadoop'.")
-      ),
-      HADOOP_BIN = Config(
-        key="hadoop_bin",
-        default=os.environ.get("HADOOP_MR1_BIN", "/usr/bin/hadoop"),
-        help=("Path to your Hadoop launcher script. E.g. /usr/bin/hadoop. " +
-              "Defaults to the environment varible HADOOP_MR1_BIN when set, " +
-              "or '/usr/bin/hadoop'.")
-      ),
-      HADOOP_CONF_DIR = Config(
-        key="hadoop_conf_dir",
-        default=os.environ.get("HADOOP_CONF_DIR", "/etc/hadoop/conf"),
-        help=("Directory to pass to hadoop_bin (from Hadoop configuration) " +
-              "as the --config flag. Defaults to the environment variable " +
-              "HADOOP_CONF_DIR when set, or '/etc/hadoop/conf'.")
-      ),
+                       default=True, type=coerce_bool), # True here for backward compatibility
     )
   )
 )
@@ -184,29 +117,7 @@ YARN_CLUSTERS = UnspecifiedConfigSection(
       SECURITY_ENABLED=Config("security_enabled", help="Is running with Kerberos authentication",
                               default=False, type=coerce_bool),
       SUBMIT_TO=Config('submit_to', help="Whether Hue should use this cluster to run jobs",
-                       default=False, type=coerce_bool), # Backward compatibility
-
-      HADOOP_MAPRED_HOME = Config(
-        key="hadoop_mapred_home",
-        default=os.environ.get("HADOOP_MR2_HOME", "/usr/lib/hadoop-mapreduce"),
-        help=("Path to directory holding Hadoop MR2 libs. " +
-              "E.g. /usr/lib/hadoop. Defaults to the environment " +
-              "variable HADOOP_MR2_HOME when set, or '/usr/lib/hadoop'.")
-      ),
-      HADOOP_BIN = Config(
-        key="hadoop_bin",
-        default=os.environ.get("HADOOP_MR2_BIN", "/usr/bin/hadoop"),
-        help=("Path to your Hadoop launcher script. E.g. /usr/bin/hadoop. " +
-              "Defaults to the environment varible HADOOP_MR2_BIN when set, " +
-              "or '/usr/bin/hadoop'.")
-      ),
-      HADOOP_CONF_DIR = Config(
-        key="hadoop_conf_dir",
-        default=os.environ.get("HADOOP_CONF_DIR", "/etc/hadoop/conf"),
-        help=("Directory to pass to hadoop_bin (from Hadoop configuration) " +
-              "as the --config flag. Defaults to the environment variable " +
-              "HADOOP_CONF_DIR when set, or '/etc/hadoop/conf'.")
-      ),
+                       default=False, type=coerce_bool), # False here for backward compatibility
       IS_YARN=Config("is_yarn", help="Attribute set only on YARN clusters and not MR1 ones.",
                      default=True, type=coerce_bool),
       RESOURCE_MANAGER_API_URL=Config("resourcemanager_api_url",
@@ -231,6 +142,8 @@ def config_validator(user):
   """
   from hadoop.fs import webhdfs
   from hadoop import job_tracker
+  from hadoop.yarn import tests
+
   res = []
   submit_to = []
 
@@ -238,9 +151,6 @@ def config_validator(user):
   has_default = False
   for name in HDFS_CLUSTERS.keys():
     cluster = HDFS_CLUSTERS[name]
-    res.extend(validate_path(cluster.HADOOP_HDFS_HOME, is_dir=True))
-    res.extend(validate_path(cluster.HADOOP_CONF_DIR, is_dir=True))
-    res.extend(validate_path(cluster.HADOOP_BIN, is_dir=False))
     res.extend(webhdfs.test_fs_configuration(cluster))
     if name == 'default':
       has_default = True
@@ -252,9 +162,6 @@ def config_validator(user):
   for name in MR_CLUSTERS.keys():
     cluster = MR_CLUSTERS[name]
     if cluster.SUBMIT_TO.get():
-      res.extend(validate_path(cluster.HADOOP_MAPRED_HOME, is_dir=True))
-      res.extend(validate_path(cluster.HADOOP_CONF_DIR, is_dir=True))
-      res.extend(validate_path(cluster.HADOOP_BIN, is_dir=False))
       mr_down.extend(job_tracker.test_jt_configuration(cluster))
       submit_to.append('mapred_clusters.' + name)
   # If HA still failing
@@ -265,9 +172,7 @@ def config_validator(user):
   for name in YARN_CLUSTERS.keys():
     cluster = YARN_CLUSTERS[name]
     if cluster.SUBMIT_TO.get():
-      res.extend(validate_path(cluster.HADOOP_MAPRED_HOME, is_dir=True))
-      res.extend(validate_path(cluster.HADOOP_CONF_DIR, is_dir=True))
-      res.extend(validate_path(cluster.HADOOP_BIN, is_dir=False))
+      res.extend(tests.test_yarn_configurations())
       submit_to.append('yarn_clusters.' + name)
 
   if not submit_to:

+ 6 - 3
desktop/libs/hadoop/src/hadoop/core_site.py

@@ -14,8 +14,9 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 """
-Helper for reading core-site.xml
+Deprecated: not used anymore and will be empty
 """
 
 import errno
@@ -55,10 +56,12 @@ def _parse_core_site():
   global _CORE_SITE_PATH
 
   for indentifier in conf.HDFS_CLUSTERS.get():
-    _CORE_SITE_PATH = os.path.join(conf.HDFS_CLUSTERS[indentifier].HADOOP_CONF_DIR.get(), 'core-site.xml')
     try:
+      _CORE_SITE_PATH = os.path.join(conf.HDFS_CLUSTERS[indentifier].HADOOP_CONF_DIR.get(), 'core-site.xml') # Will KeyError and be empty as HADOOP_CONF_DIR does not exist anymore
       data = file(_CORE_SITE_PATH, 'r').read()
       break
+    except KeyError:
+      data = ""
     except IOError, err:
       if err.errno != errno.ENOENT:
         LOG.error('Cannot read from "%s": %s' % (_CORE_SITE_PATH, err))
@@ -75,4 +78,4 @@ def get_trash_interval():
 
   Also indicates whether trash is enabled or not.
   """
-  return get_conf().get(_CNF_TRASH_INTERVAL, None)
+  return get_conf().get(_CNF_TRASH_INTERVAL)
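As the added comment in the hunk notes, HADOOP_CONF_DIR no longer exists on the HDFS cluster config section, so _parse_core_site() falls into the new KeyError branch and the module is left with an empty configuration. A hedged sketch of the resulting behaviour of the deprecated helper:

    from hadoop import core_site

    # Hedged sketch: with core-site.xml never located, no trash interval
    # can be reported any more.
    interval = core_site.get_trash_interval()   # expected to be None after this change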

+ 3 - 9
desktop/libs/hadoop/src/hadoop/fs/webhdfs.py

@@ -282,7 +282,7 @@ class WebHdfs(Hdfs):
 
   def remove(self, path, skip_trash=False):
     """Delete a file."""
-    if hadoop.core_site.get_trash_interval() is None or skip_trash:
+    if skip_trash:
       self._delete(path, recursive=False)
     else:
       self._trash(path, recursive=False)
@@ -293,7 +293,7 @@ class WebHdfs(Hdfs):
 
   def rmtree(self, path, skip_trash=False):
     """Delete a tree recursively."""
-    if hadoop.core_site.get_trash_interval() is None or skip_trash:
+    if skip_trash:
       self._delete(path, recursive=True)
     else:
       self._trash(path, recursive=True)
@@ -306,9 +306,6 @@ class WebHdfs(Hdfs):
     Removing the root from ``path`` will provide the original path.
     Ensure parent directories exist and rename path.
     """
-    if hadoop.core_site.get_trash_interval() is None:
-      raise IOError(errno.EPERM, _("Trash is not enabled."))
-
     if not path.startswith(self.trash_path):
       raise IOError(errno.EPERM, _("File %s is not in trash") % path)
 
@@ -333,9 +330,6 @@ class WebHdfs(Hdfs):
 
     Purge all trash in users ``trash_path``
     """
-    if hadoop.core_site.get_trash_interval() is None:
-      raise IOError(errno.EPERM, _("Trash is not enabled."))
-
     for timestamped_directory in self.listdir(self.trash_path):
       self.rmtree(self.join(self.trash_path, timestamped_directory), True)
 
@@ -782,7 +776,7 @@ def test_fs_configuration(fs_config):
             _('Failed to create temporary file "%s"') % tmpname)]
 
   # Check superuser has super power
-  try:  # Finally: delete tmpname
+  try:
     try:
       fs.chown(tmpname, fs.superuser)
     except Exception, ex:
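Since remove() and rmtree() no longer consult core-site.xml, deletion now always routes through the per-user HDFS trash unless the caller opts out. A hedged usage sketch, with illustrative paths and fs standing for a WebHdfs instance such as request.fs:

    # Hedged usage sketch; paths are illustrative.
    fs.remove('/user/demo/report.csv')                    # moved under the user's .Trash
    fs.remove('/user/demo/report.csv', skip_trash=True)   # bypasses trash, deleted immediately
    fs.rmtree('/user/demo/old_logs')                      # whole tree moved to .Trash
    fs.rmtree('/user/demo/old_logs', skip_trash=True)     # deleted recursively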

+ 1 - 3
desktop/libs/hadoop/src/hadoop/pseudo_hdfs4.py

@@ -439,7 +439,7 @@ class PseudoHdfs4(object):
     yarn_configs = {
       'yarn.resourcemanager.resource-tracker.address': '%s:%s' % (self._fqdn, self._rm_resource_port,),
       'yarn.resourcemanager.address': '%s:%s' % (self._fqdn, self._rm_port,),
-      'yarn.resourcemanager.scheduler.address': '%s:%s' % (self._fqdn, 8030,), #self._rm_scheduler_port # /!\ Hardcoded for now
+      'yarn.resourcemanager.scheduler.address': '%s:%s' % (self._fqdn, self._rm_scheduler_port,),
       'yarn.resourcemanager.scheduler.class': 'org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler',
       'yarn.resourcemanager.admin.address': '%s:%s' % (self._fqdn, self._rm_admin_port,),
       'yarn.resourcemanager.webapp.address': '%s:%s' % (self._fqdn, self._rm_webapp_port,),
@@ -515,9 +515,7 @@ def shared_cluster():
     closers = [
       hadoop.conf.HDFS_CLUSTERS['default'].FS_DEFAULTFS.set_for_testing(cluster.fs_default_name),
       hadoop.conf.HDFS_CLUSTERS['default'].WEBHDFS_URL.set_for_testing(webhdfs_url),
-      hadoop.conf.HDFS_CLUSTERS['default'].HADOOP_CONF_DIR.set_for_testing(cluster.hadoop_conf_dir),
 
-      hadoop.conf.YARN_CLUSTERS['default'].HADOOP_CONF_DIR.set_for_testing(cluster.hadoop_conf_dir),
       hadoop.conf.YARN_CLUSTERS['default'].HOST.set_for_testing(fqdn),
       hadoop.conf.YARN_CLUSTERS['default'].PORT.set_for_testing(cluster._rm_port),
 

+ 40 - 0
desktop/libs/hadoop/src/hadoop/yarn/tests.py

@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from hadoop.yarn.resource_manager_api import get_resource_manager
+
+
+LOG = logging.getLogger(__name__)
+
+
+def test_yarn_configurations():
+  # Single cluster for now
+
+  result = []
+
+  try:
+    url = ''
+    api = get_resource_manager()
+    url = api._url
+    api.apps()
+  except Exception, e:
+    msg = 'Failed to contact Resource Manager at %s: %s' % (url, e)
+    result.append(('Resource Manager', msg))
+
+  return result
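config_validator() in hadoop/conf.py (see the hunk above) now extends its result list with this check for every YARN cluster marked submit_to, instead of validating local Hadoop paths. A hedged standalone usage sketch:

    from hadoop.yarn import tests

    # Hedged sketch: each entry is a (name, message) pair; the list is
    # empty when the Resource Manager responds.
    for name, message in tests.test_yarn_configurations():
        print '%s: %s' % (name, message)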