
HUE-2614 [hadoop] Read the list of HDFS superuser groups from hdfs-site.xml

krish committed 10 years ago
commit 7b702bfe97
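
This change treats members of the HDFS supergroup, read from dfs.permissions.superusergroup in hdfs-site.xml (default "supergroup"), as filesystem superusers in the File Browser, in addition to the single NameNode superuser. A minimal sketch of the membership check, assuming Django's auth User/Group models as used by Hue; the actual helper added below is _is_hdfs_superuser in filebrowser/views.py:

    # Sketch only: a user counts as an HDFS superuser if the username matches
    # the NameNode superuser, or the user belongs to the configured supergroup.
    def is_hdfs_superuser(user, fs):
      return (user.username == fs.superuser or
              user.groups.filter(name__exact=fs.supergroup).exists())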

+ 2 - 2
apps/filebrowser/src/filebrowser/templates/editor_components.mako

@@ -125,7 +125,7 @@
         <option>${choice}</option>
       % endif
     % endfor
-    % if is_superuser:
+    % if is_fs_superuser:
       % if seen or not current_value:
         <option value="__other__">Other</option>
       % else:
@@ -134,7 +134,7 @@
     % endif
 
     </select>
-    % if is_superuser:
+    % if is_fs_superuser:
       % if seen or not current_value:
         <input name="${other_key}" class="hide">
       % else:

+ 3 - 3
apps/filebrowser/src/filebrowser/templates/listdir_components.mako

@@ -178,10 +178,10 @@ from django.utils.translation import ugettext as _
   </div>
 
   <!-- chown modal -->
-  % if is_superuser:
+  % if is_fs_superuser:
   <div id="changeOwnerModal" class="modal hide fade">
     <%
-      select_filter = is_superuser and 'SelectWithOther' or ''
+      select_filter = is_fs_superuser and 'SelectWithOther' or ''
     %>
     <form id="chownForm" action="/filebrowser/chown" method="POST" enctype="multipart/form-data" class="form-stacked form-padding-fix">
       ${ csrf_token(request) | n,unicode }
@@ -191,7 +191,7 @@ from django.utils.translation import ugettext as _
       </div>
 
       <div class="modal-body change-owner-modal-body clearfix" >
-        <div class="alert alert-message block-message info">${_('Note: Only the Hadoop superuser, "%(superuser)s" on this file system, may change the owner of a file.') % dict(superuser=superuser)}</div>
+        <div class="alert alert-message block-message info">${_('Note: Only the Hadoop superuser, "%(superuser)s" or the HDFS supergroup, "%(supergroup)s" on this file system, may change the owner of a file.') % dict(superuser=superuser, supergroup=supergroup)}</div>
         <div style="padding-left: 15px; padding-bottom: 10px;">
           <label>${_('User')}</label>
           ${ edit.selection("user", users, user.username, "user_other") }

+ 11 - 7
apps/filebrowser/src/filebrowser/views.py

@@ -424,6 +424,7 @@ def listdir_paged(request, path):
 
     page.object_list = [ _massage_stats(request, s) for s in shown_stats ]
 
+    is_fs_superuser = _is_hdfs_superuser(request)
     data = {
         'path': path,
         'breadcrumbs': breadcrumbs,
@@ -438,11 +439,11 @@ def listdir_paged(request, path):
         'cwd_set': True,
         'file_filter': 'any',
         'current_dir_path': path,
-        'is_fs_superuser': request.user.username == request.fs.superuser,
-        'is_superuser': request.user.username == request.fs.superuser,
-        'groups': request.user.username == request.fs.superuser and [str(x) for x in Group.objects.values_list('name', flat=True)] or [],
-        'users': request.user.username == request.fs.superuser and [str(x) for x in User.objects.values_list('username', flat=True)] or [],
+        'is_fs_superuser': is_fs_superuser,
+        'groups': is_fs_superuser and [str(x) for x in Group.objects.values_list('name', flat=True)] or [],
+        'users': is_fs_superuser and [str(x) for x in User.objects.values_list('username', flat=True)] or [],
         'superuser': request.fs.superuser,
+        'supergroup': request.fs.supergroup,
         'is_sentry_managed': request.fs.is_sentry_managed(path)
     }
     return render('listdir.mako', request, data)
@@ -944,9 +945,9 @@ def generic_op(form_class, request, op, parameter_names, piggyback=None, templat
                 op(*args)
             except (IOError, WebHdfsException), e:
                 msg = _("Cannot perform operation.")
-                if request.user.is_superuser and not request.user == request.fs.superuser:
-                    msg += _(' Note: you are a Hue admin but not a HDFS superuser (which is "%(superuser)s").') \
-                           % {'superuser': request.fs.superuser}
+                if request.user.is_superuser and not _is_hdfs_superuser(request):
+                    msg += _(' Note: you are a Hue admin but not a HDFS superuser, "%(superuser)s" or part of HDFS supergroup, "%(supergroup)s".') \
+                           % {'superuser': request.fs.superuser, 'supergroup': request.fs.supergroup}
                 raise PopupException(msg, detail=e)
             if next:
                 logging.debug("Next: %s" % next)
@@ -1312,3 +1313,6 @@ def truncate(toTruncate, charsToKeep=50):
         return truncated
     else:
         return toTruncate
+
+def _is_hdfs_superuser(request):
+  return request.user.username == request.fs.superuser or request.user.groups.filter(name__exact=request.fs.supergroup).exists()

+ 7 - 0
apps/filebrowser/src/filebrowser/views_test.py

@@ -23,6 +23,7 @@ import re
 import urlparse
 from avro import schema, datafile, io
 
+from django.core.urlresolvers import reverse
 from django.utils.encoding import smart_str
 from nose.plugins.attrib import attr
 from nose.plugins.skip import SkipTest
@@ -358,6 +359,12 @@ def test_chmod_sticky():
 def test_chown():
   cluster = pseudo_hdfs4.shared_cluster()
 
+  # Login as Non Hadoop superuser
+  c = make_logged_in_client('test')
+
+  response = c.post(reverse('index'))
+  assert_false('Change owner' in response.content)
+
   # Only the Hadoop superuser really has carte blanche here
   c = make_logged_in_client(cluster.superuser)
   cluster.fs.setuser(cluster.superuser)
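
The new assertion verifies that a plain Hue user, who is neither the HDFS superuser nor a supergroup member, is not offered the Change owner action. A hypothetical companion check, assuming nose's assert_true and Django's Group/User models are importable in this module; the group wiring is an illustration, not part of this commit:

    # Hypothetical sketch: a user added to the HDFS supergroup should be
    # offered the chown option.
    def test_chown_supergroup_member():
      cluster = pseudo_hdfs4.shared_cluster()
      c = make_logged_in_client('test_supergroup')
      group, created = Group.objects.get_or_create(name=cluster.fs.supergroup)
      User.objects.get(username='test_supergroup').groups.add(group)

      response = c.post(reverse('index'))
      assert_true('Change owner' in response.content)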

+ 10 - 3
desktop/libs/hadoop/src/hadoop/fs/webhdfs.py

@@ -34,7 +34,7 @@ from hadoop.fs.hadoopfs import Hdfs
 from hadoop.fs.exceptions import WebHdfsException
 from hadoop.fs.webhdfs_types import WebHdfsStat, WebHdfsContentSummary
 from hadoop.conf import UPLOAD_CHUNK_SIZE
-from hadoop.hdfs_site import get_nn_sentry_prefixes, get_umask_mode
+from hadoop.hdfs_site import get_nn_sentry_prefixes, get_umask_mode, get_supergroup
 
 import hadoop.conf
 import desktop.conf
@@ -61,7 +61,8 @@ class WebHdfs(Hdfs):
                security_enabled=False,
                ssl_cert_ca_verify=True,
                temp_dir="/tmp",
-               umask=01022):
+               umask=01022,
+               hdfs_supergroup=None):
     self._url = url
     self._superuser = hdfs_superuser
     self._security_enabled = security_enabled
@@ -70,6 +71,7 @@ class WebHdfs(Hdfs):
     self._umask = umask
     self._fs_defaultfs = fs_defaultfs
     self._logical_name = logical_name
+    self._supergroup = hdfs_supergroup
 
     self._client = self._make_client(url, security_enabled, ssl_cert_ca_verify)
     self._root = resource.Resource(self._client)
@@ -89,7 +91,8 @@ class WebHdfs(Hdfs):
                security_enabled=hdfs_config.SECURITY_ENABLED.get(),
                ssl_cert_ca_verify=hdfs_config.SSL_CERT_CA_VERIFY.get(),
                temp_dir=hdfs_config.TEMP_DIR.get(),
-               umask=get_umask_mode())
+               umask=get_umask_mode(),
+               hdfs_supergroup=get_supergroup())
 
   def __str__(self):
     return "WebHdfs at %s" % self._url
@@ -126,6 +129,10 @@ class WebHdfs(Hdfs):
   def umask(self):
     return self._umask
 
+  @property
+  def supergroup(self):
+    return self._supergroup
+
   @property
   def security_enabled(self):
     return self._security_enabled

+ 4 - 0
desktop/libs/hadoop/src/hadoop/hdfs_site.py

@@ -30,6 +30,7 @@ _HDFS_SITE_DICT = None
 
 _CNF_NN_PERMISSIONS_UMASK_MODE = 'fs.permissions.umask-mode'
 _CNF_NN_SENTRY_PREFIX = 'sentry.authorization-provider.hdfs-path-prefixes'
+_CNF_NN_PERMISSIONS_SUPERGROUP = 'dfs.permissions.superusergroup'
 
 
 def reset():
@@ -53,6 +54,9 @@ def get_umask_mode():
 def get_nn_sentry_prefixes():
   return get_conf().get(_CNF_NN_SENTRY_PREFIX, '')
 
+def get_supergroup():
+  return get_conf().get(_CNF_NN_PERMISSIONS_SUPERGROUP, 'supergroup')
+
 
 def _parse_hdfs_site():
   global _HDFS_SITE_DICT
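
get_supergroup() falls back to Hadoop's default group name "supergroup" when dfs.permissions.superusergroup is not set. A standalone approximation of the lookup, assuming a standard Hadoop hdfs-site.xml layout; the real code reads Hue's cached hdfs-site.xml dictionary via get_conf() rather than parsing the file itself, and the path below is only illustrative:

    # Standalone sketch: read dfs.permissions.superusergroup from hdfs-site.xml,
    # defaulting to 'supergroup' if the file or the property is missing.
    import xml.etree.ElementTree as ET

    def read_supergroup(hdfs_site_path='/etc/hadoop/conf/hdfs-site.xml'):
      try:
        root = ET.parse(hdfs_site_path).getroot()
      except (IOError, ET.ParseError):
        return 'supergroup'
      for prop in root.findall('property'):
        if prop.findtext('name') == 'dfs.permissions.superusergroup':
          return prop.findtext('value') or 'supergroup'
      return 'supergroup'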