
HUE-7248 [adls] Ability to browse data / content in ADLS

jdesjean, 8 years ago
parent
commit
91bde74
47 changed files with 1674 additions and 125 deletions
   1. apps/filebrowser/src/filebrowser/settings.py (+1 -0)
   2. apps/filebrowser/src/filebrowser/templates/fb_components.mako (+6 -0)
   3. apps/filebrowser/src/filebrowser/templates/listdir.mako (+10 -11)
   4. apps/filebrowser/src/filebrowser/templates/listdir_components.mako (+29 -13)
   5. apps/filebrowser/src/filebrowser/views.py (+19 -7)
   6. apps/metastore/src/metastore/views.py (+1 -1)
   7. apps/useradmin/src/useradmin/models.py (+1 -0)
   8. desktop/Makefile (+1 -0)
   9. desktop/conf.dist/hue.ini (+23 -0)
  10. desktop/conf/pseudo-distributed.ini.tmpl (+23 -0)
  11. desktop/core/src/desktop/lib/fs/__init__.py (+3 -1)
  12. desktop/core/src/desktop/lib/fs/proxyfs.py (+6 -0)
  13. desktop/core/src/desktop/lib/fsmanager.py (+4 -1)
  14. desktop/core/src/desktop/lib/rest/resource.py (+2 -2)
  15. desktop/core/src/desktop/models.py (+10 -0)
  16. desktop/core/src/desktop/static/desktop/js/apiHelper.js (+64 -2)
  17. desktop/core/src/desktop/static/desktop/js/assist/assistAdlsEntry.js (+258 -0)
  18. desktop/core/src/desktop/static/desktop/js/assist/assistHdfsEntry.js (+0 -1)
  19. desktop/core/src/desktop/static/desktop/js/jquery.filechooser.js (+79 -21)
  20. desktop/core/src/desktop/static/desktop/js/ko.hue-bindings.js (+11 -1)
  21. desktop/core/src/desktop/templates/assist.mako (+141 -2)
  22. desktop/libs/aws/src/aws/s3/s3fs.py (+3 -0)
  23. desktop/libs/azure/Makefile (+34 -0)
  24. desktop/libs/azure/babel.cfg (+1 -0)
  25. desktop/libs/azure/hueversion.py (+1 -0)
  26. desktop/libs/azure/setup.py (+29 -0)
  27. desktop/libs/azure/src/azure/__init__.py (+15 -0)
  28. desktop/libs/azure/src/azure/active_directory.py (+65 -0)
  29. desktop/libs/azure/src/azure/adls/__init__.py (+15 -0)
  30. desktop/libs/azure/src/azure/adls/webhdfs.py (+86 -0)
  31. desktop/libs/azure/src/azure/client.py (+56 -0)
  32. desktop/libs/azure/src/azure/conf.py (+112 -0)
  33. desktop/libs/azure/src/azure/locale/de/LC_MESSAGES/django.po (+40 -0)
  34. desktop/libs/azure/src/azure/locale/en/LC_MESSAGES/django.po (+40 -0)
  35. desktop/libs/azure/src/azure/locale/en_US.pot (+39 -0)
  36. desktop/libs/azure/src/azure/locale/es/LC_MESSAGES/django.po (+40 -0)
  37. desktop/libs/azure/src/azure/locale/fr/LC_MESSAGES/django.po (+40 -0)
  38. desktop/libs/azure/src/azure/locale/ja/LC_MESSAGES/django.po (+40 -0)
  39. desktop/libs/azure/src/azure/locale/ko/LC_MESSAGES/django.po (+40 -0)
  40. desktop/libs/azure/src/azure/locale/pt/LC_MESSAGES/django.po (+40 -0)
  41. desktop/libs/azure/src/azure/locale/pt_BR/LC_MESSAGES/django.po (+40 -0)
  42. desktop/libs/azure/src/azure/locale/zh_CN/LC_MESSAGES/django.po (+40 -0)
  43. desktop/libs/hadoop/src/hadoop/core_site.py (+34 -0)
  44. desktop/libs/hadoop/src/hadoop/fs/upload.py (+5 -1)
  45. desktop/libs/hadoop/src/hadoop/fs/webhdfs.py (+123 -61)
  46. desktop/libs/indexer/src/indexer/api3.py (+2 -0)
  47. desktop/libs/notebook/src/notebook/api.py (+2 -0)

+ 1 - 0
apps/filebrowser/src/filebrowser/settings.py

@@ -23,4 +23,5 @@ MENU_INDEX = 20
 
 PERMISSION_ACTIONS = (
   ("s3_access", "Access to S3 from filebrowser and filepicker."),
+  ("adls_access", "Access to ADLS from filebrowser and filepicker.")
 )

+ 6 - 0
apps/filebrowser/src/filebrowser/templates/fb_components.mako

@@ -31,6 +31,12 @@ from aws import get_client
               <i class="fa fa-fw fa-cubes"></i> ${ get_client()._region }
             </span>
           </li>
+        %elif path.lower().find('adl:/') == 0:
+          <li style="padding-top: 12px">
+            <span class="breadcrumb-link homeLink">
+              <svg class="hi"><use xlink:href='#hi-adls'></use></svg>
+            </span>
+          </li>
         %else:
           <li><a class="pointer breadcrumb-link homeLink" data-bind="click: $root.openHome, attr:{'href': '${url('filebrowser.views.view', path=urlencode(path))}?default_to_home'}">
             <i class="fa fa-home"></i> ${_('Home')}</a>

+ 10 - 11
apps/filebrowser/src/filebrowser/templates/listdir.mako

@@ -75,7 +75,7 @@ ${ fb_components.menubar() }
                 </a>
               </li>
               % endif
-              <li class="divider" data-bind="visible: !isS3()"></li>
+              <li class="divider" data-bind="visible: isPermissionEnabled()"></li>
               % if is_fs_superuser:
               <li data-bind="css: {'disabled': isCurrentDirSentryManaged() || selectedSentryFiles().length > 0 }">
                 <a href="javascript: void(0)" data-bind="visible: ! inTrash(), click: changeOwner, enable: selectedFiles().length > 0">
@@ -83,19 +83,19 @@ ${ fb_components.menubar() }
                 </a>
               </li>
               % endif
-              <li data-bind="css: {'disabled': isCurrentDirSentryManaged() || selectedSentryFiles().length > 0 }, visible: !isS3()">
+              <li data-bind="css: {'disabled': isCurrentDirSentryManaged() || selectedSentryFiles().length > 0 }, visible: isPermissionEnabled()">
                 <a href="javascript: void(0)" data-bind="visible: ! inTrash(), click: changePermissions, enable: selectedFiles().length > 0">
                   <i class="fa fa-fw fa-list-alt"></i> ${_('Change permissions')}
                 </a>
               </li>
-              <li class="divider" data-bind="visible: !isS3()"></li>
-              <li data-bind="css: {'disabled': inTrash() || selectedFiles().length > 1 }, visible: !isS3()">
+              <li class="divider" data-bind="visible: isCompressEnabled() || isReplicationEnabled() || isSummaryEnabled()"></li>
+              <li data-bind="css: {'disabled': inTrash() || selectedFiles().length > 1 }, visible: isSummaryEnabled()">
                 <a class="pointer" data-bind="click: function(){ selectedFiles().length == 1 ? showSummary(): void(0)}">
                   <i class="fa fa-fw fa-pie-chart"></i> ${_('Summary')}
                 </a>
               </li>
               <li>
-                <a href="javascript: void(0)" title="${_('Set Replication')}" data-bind="visible: !inTrash() && !isS3() && selectedFiles().length == 1 && selectedFile().type == 'file', click: setReplicationFactor">
+                <a href="javascript: void(0)" title="${_('Set Replication')}" data-bind="visible: !inTrash() && isReplicationEnabled() && selectedFiles().length == 1 && selectedFile().type == 'file', click: setReplicationFactor">
                   <i class="fa fa-fw fa-hdd-o"></i> ${_('Set replication')}
                 </a>
               </li>
@@ -103,7 +103,7 @@ ${ fb_components.menubar() }
                 <li><a href="javascript: void(0)" title="${_('Compress selection into a single archive')}" data-bind="click: function() { setCompressArchiveDefault(); confirmCompressFiles();}, visible: showCompressButton">
                   <i class="fa fa-fw fa-file-archive-o"></i> ${_('Compress')}</a>
                 </li>
-                <li><a href="javascript: void(0)" title="${_('Extract selected archive')}" data-bind="visible: selectedFiles().length == 1 && isArchive(selectedFile().name) && !isS3(), click: confirmExtractArchive">
+                <li><a href="javascript: void(0)" title="${_('Extract selected archive')}" data-bind="visible: selectedFiles().length == 1 && isArchive(selectedFile().name) && isCompressEnabled(), click: confirmExtractArchive">
                   <i class="fa fa-fw fa-file-archive-o"></i> ${_('Extract')}</a>
                 </li>
               % endif
@@ -112,10 +112,9 @@ ${ fb_components.menubar() }
 
           <button class="btn fileToolbarBtn" title="${_('Restore from trash')}" data-bind="visible: inRestorableTrash(), click: restoreTrashSelected, enable: selectedFiles().length > 0 && isCurrentDirSelected().length == 0"><i class="fa fa-cloud-upload"></i> ${_('Restore')}</button>
           <!-- ko ifnot: inTrash -->
-          <!-- ko if: $root.isS3 -->
+          % if not is_trash_enabled:
           <button class="btn fileToolbarBtn delete-link" title="${_('Delete forever')}" data-bind="enable: selectedFiles().length > 0, click: deleteSelected"><i class="fa fa-bolt"></i> ${_('Delete forever')}</button>
-          <!-- /ko -->
-          <!-- ko ifnot: $root.isS3 -->
+          % else:
           <div id="delete-dropdown" class="btn-group" style="vertical-align: middle">
             <button id="trash-btn" class="btn toolbarBtn" data-bind="enable: selectedFiles().length > 0 && isCurrentDirSelected().length == 0, click: trashSelected"><i class="fa fa-times"></i> ${_('Move to trash')}</button>
             <button id="trash-btn-caret" class="btn toolbarBtn dropdown-toggle" data-toggle="dropdown" data-bind="enable: selectedFiles().length > 0 && isCurrentDirSelected().length == 0">
@@ -125,7 +124,7 @@ ${ fb_components.menubar() }
               <li><a href="javascript: void(0)" class="delete-link" title="${_('Delete forever')}" data-bind="enable: selectedFiles().length > 0, click: deleteSelected"><i class="fa fa-bolt"></i> ${_('Delete forever')}</a></li>
             </ul>
           </div>
-          <!-- /ko -->
+          % endif
           <!-- /ko -->
           % if 'oozie' in apps:
             <button class="btn fileToolbarBtn" title="${_('Submit')}"
@@ -134,7 +133,7 @@ ${ fb_components.menubar() }
             </button>
             % if ENABLE_EXTRACT_UPLOADED_ARCHIVE.get():
               <button class="btn extractArchiveBtn" title="${_('Extract zip, tar.gz, bz2 or bzip2')}"
-                data-bind="visible: selectedFiles().length == 1 && isArchive(selectedFile().name) && !isS3(), click: confirmExtractArchive">
+                data-bind="visible: selectedFiles().length == 1 && isArchive(selectedFile().name) && isCompressEnabled(), click: confirmExtractArchive">
                 <i class="fa fa-fw fa-file-archive-o"></i> ${_('Extract')}
               </button>
             % endif

+ 29 - 13
apps/filebrowser/src/filebrowser/templates/listdir_components.mako

@@ -572,32 +572,32 @@ from filebrowser.conf import ENABLE_EXTRACT_UPLOADED_ARCHIVE
     <a href="javascript: void(0)" title="${_('Download')}" data-bind="click: (!$root.inTrash() && $root.selectedFiles().length == 1 && selectedFile().type == 'file') ? $root.downloadFile: void(0)">
     <i class="fa fa-fw fa-arrow-circle-o-down"></i> ${_('Download')}</a></li>
     % endif
-    <li class="divider" data-bind="visible: !isS3()"></li>
+    <li class="divider" data-bind="visible: isPermissionEnabled()"></li>
     % if is_fs_superuser:
-    <li data-bind="css: {'disabled': $root.isCurrentDirSentryManaged || selectedSentryFiles().length > 0 }, visible: !isS3()">
+    <li data-bind="css: {'disabled': $root.isCurrentDirSentryManaged || selectedSentryFiles().length > 0 }, visible: isPermissionEnabled()">
       <a href="javascript: void(0)" data-bind="visible: !$root.inTrash(), click: $root.changeOwner, enable: $root.selectedFiles().length > 0">
         <i class="fa fa-fw fa-user"></i> ${_('Change owner / group')}
       </a>
     </li>
     % endif
-    <li data-bind="css: {'disabled': $root.isCurrentDirSentryManaged() || selectedSentryFiles().length > 0 }, visible: !isS3()">
+    <li data-bind="css: {'disabled': $root.isCurrentDirSentryManaged() || selectedSentryFiles().length > 0 }, visible: isPermissionEnabled()">
       <a href="javascript: void(0)" data-bind="visible: !$root.inTrash(), click: $root.changePermissions, enable: $root.selectedFiles().length > 0">
         <i class="fa fa-fw fa-list-alt"></i> ${_('Change permissions')}
       </a>
     </li>
-    <li class="divider" data-bind="visible: !isS3() || (isS3() && !isS3Root())"></li>
+    <li class="divider" data-bind="visible: isCompressEnabled() || isReplicationEnabled() || isSummaryEnabled()"></li>
     % if is_trash_enabled:
     <li data-bind="css: {'disabled': $root.selectedFiles().length == 0 || isCurrentDirSelected().length > 0}">
     <a href="javascript: void(0)" data-bind="click: ($root.selectedFiles().length > 0 && isCurrentDirSelected().length == 0) ? $root.trashSelected: void(0)">
     <i class="fa fa-fw fa-times"></i> ${_('Move to trash')}</a></li>
-    %endif
+    % endif
     <li><a href="javascript: void(0)" class="delete-link" title="${_('Delete forever')}" data-bind="enable: $root.selectedFiles().length > 0, click: $root.deleteSelected"><i class="fa fa-fw fa-bolt"></i> ${_('Delete forever')}</a></li>
-    <li class="divider" data-bind="visible: !isS3()"></li>
-    <li data-bind="css: {'disabled': selectedFiles().length > 1 }, visible: !isS3()">
+    <li class="divider" data-bind="visible: isSummaryEnabled()"></li>
+    <li data-bind="css: {'disabled': selectedFiles().length > 1 }, visible: isSummaryEnabled()">
       <a class="pointer" data-bind="click: function(){ selectedFiles().length == 1 ? showSummary(): void(0)}"><i class="fa fa-fw fa-pie-chart"></i> ${_('Summary')}</a>
     </li>
-    <li data-bind="css: {'disabled': inTrash() || isS3() || selectedFiles().length != 1 || selectedFile().type != 'file'}">
-      <a href="javascript: void(0)" title="${_('Set Replication')}" data-bind="click: (!inTrash() && !isS3() && selectedFiles().length == 1 && selectedFile().type == 'file') ? setReplicationFactor: void(0)">
+    <li data-bind="css: {'disabled': inTrash() || !isReplicationEnabled() || selectedFiles().length != 1 || selectedFile().type != 'file'}">
+      <a href="javascript: void(0)" title="${_('Set Replication')}" data-bind="click: (!inTrash() && isReplicationEnabled() && selectedFiles().length == 1 && selectedFile().type == 'file') ? setReplicationFactor: void(0)">
         <i class="fa fa-fw fa-hdd-o"></i> ${_('Set replication')}
       </a>
     </li>
@@ -606,8 +606,8 @@ from filebrowser.conf import ENABLE_EXTRACT_UPLOADED_ARCHIVE
         <a href="javascript: void(0)" title="${_('Compress selection into a single archive')}" data-bind="click: function() { setCompressArchiveDefault(); confirmCompressFiles();}, visible: showCompressButton">
         <i class="fa fa-fw fa-file-archive-o"></i> ${_('Compress')}</a>
       </li>
-      <li data-bind="css: {'disabled': selectedFiles().length != 1 || !isArchive(selectedFile().name) || isS3()}">
-        <a href="javascript: void(0)" title="${_('Extract selected archive')}" data-bind="visible: selectedFiles().length == 1 && isArchive(selectedFile().name) && !isS3(), click: confirmExtractArchive">
+      <li data-bind="css: {'disabled': selectedFiles().length != 1 || !isArchive(selectedFile().name) || !isCompressEnabled()}">
+        <a href="javascript: void(0)" title="${_('Extract selected archive')}" data-bind="visible: selectedFiles().length == 1 && isArchive(selectedFile().name) && isCompressEnabled(), click: confirmExtractArchive">
         <i class="fa fa-fw fa-file-archive-o"></i> ${_('Extract')}</a>
       </li>
     % endif
@@ -1049,6 +1049,23 @@ from filebrowser.conf import ENABLE_EXTRACT_UPLOADED_ARCHIVE
         return self.currentPath().toLowerCase().indexOf('s3a://') === 0;
       });
 
+      self.isHdfs = ko.pureComputed(function () {
+        var currentPath = self.currentPath().toLowerCase();
+        return currentPath.indexOf('/') === 0 || currentPath.indexOf('hdfs') === 0
+      });
+      self.isCompressEnabled = ko.pureComputed(function () {
+        return self.isHdfs();
+      });
+      self.isSummaryEnabled = ko.pureComputed(function () {
+        return self.isHdfs();
+      });
+      self.isPermissionEnabled = ko.pureComputed(function () {
+        return !self.isS3();
+      });
+      self.isReplicationEnabled = ko.pureComputed(function () {
+        return self.isHdfs();
+      });
+
       self.isS3.subscribe(function (newVal) {
         if (newVal) {
           huePubSub.publish('update.autocompleters');
@@ -1138,7 +1155,6 @@ from filebrowser.conf import ENABLE_EXTRACT_UPLOADED_ARCHIVE
         self.isCurrentDirSentryManaged(isSentryManaged);
 
         self.page(new Page(page));
-
         self.files(ko.utils.arrayMap(files, function (file) {
           file.highlighted = self.filesToHighlight.indexOf(file.path) > -1;
           var f = new File(file);
@@ -1636,7 +1652,7 @@ from filebrowser.conf import ENABLE_EXTRACT_UPLOADED_ARCHIVE
         if (fileNames.indexOf('.') !== -1) {
           return false;
         }
-        return !self.isS3() && (self.selectedFiles().length > 1 || !(self.selectedFiles().length === 1 && self.isArchive(self.selectedFile().name)));
+        return self.isHdfs() && (self.selectedFiles().length > 1 || !(self.selectedFiles().length === 1 && self.isArchive(self.selectedFile().name)));
       });
 
       self.setCompressArchiveDefault = function() {

+ 19 - 7
apps/filebrowser/src/filebrowser/views.py

@@ -26,6 +26,7 @@ import re
 import shutil
 import stat as stat_module
 import urllib
+from urlparse import urlparse
 
 from bz2 import decompress
 from datetime import datetime
@@ -386,7 +387,7 @@ def listdir(request, path):
         parent_stat['path'] = parent_path
         stats.insert(0, parent_stat)
 
-    data['files'] = [_massage_stats(request, stat) for stat in stats]
+    data['files'] = [_massage_stats(request, stat_absolute_path(path, stat)) for stat in stats]
     return render('listdir.mako', request, data)
 
 def _massage_page(page):
@@ -482,7 +483,7 @@ def listdir_paged(request, path):
     shown_stats.insert(1, current_stat)
 
     if page:
-      page.object_list = [ _massage_stats(request, s) for s in shown_stats ]
+      page.object_list = [ _massage_stats(request, stat_absolute_path(path, s)) for s in shown_stats ]
 
     is_trash_enabled = request.fs._get_scheme(path) == 'hdfs' and \
                        (request.fs.isdir(_home_trash_path(request.fs, request.user, path)) or
@@ -516,6 +517,16 @@ def listdir_paged(request, path):
     }
     return render('listdir.mako', request, data)
 
+def scheme_absolute_path(root, path):
+  splitPath = urlparse(path)
+  splitRoot = urlparse(root)
+  if splitRoot.scheme and not splitPath.scheme:
+    path = splitPath._replace(scheme=splitRoot.scheme).geturl()
+  return path
+
+def stat_absolute_path(path, stat):
+  stat["path"] = scheme_absolute_path(path, stat["path"])
+  return stat
 
 def _massage_stats(request, stats):
     """
@@ -548,7 +559,7 @@ def stat(request, path):
     if not request.fs.exists(path):
         raise Http404(_("File not found: %(path)s") % {'path': escape(path)})
     stats = request.fs.stats(path)
-    return JsonResponse(_massage_stats(request, stats))
+    return JsonResponse(_massage_stats(request, stat_absolute_path(path, stats)))
 
 
 def content_summary(request, path):
@@ -649,7 +660,8 @@ def display(request, path):
 
     dirname = posixpath.dirname(path)
     # Start with index-like data:
-    data = _massage_stats(request, request.fs.stats(path))
+    stats = request.fs.stats(path)
+    data = _massage_stats(request, stat_absolute_path(path, stats))
     data["is_embeddable"] = request.GET.get('is_embeddable', False)
     # And add a view structure:
     data["success"] = True
@@ -1065,7 +1077,7 @@ def generic_op(form_class, request, op, parameter_names, piggyback=None, templat
             try:
                 if piggyback:
                     piggy_path = form.cleaned_data[piggyback]
-                    ret["result"] = _massage_stats(request, request.fs.stats(piggy_path))
+                    ret["result"] = _massage_stats(request, stat_absolute_path(piggy_path ,request.fs.stats(piggy_path)))
             except Exception, e:
                 # Hard to report these more naturally here.  These happen either
                 # because of a bug in the piggy-back code or because of a
@@ -1264,7 +1276,7 @@ def _upload_file(request):
 
     if form.is_valid():
         uploaded_file = request.FILES['hdfs_file']
-        dest = form.cleaned_data['dest']
+        dest = scheme_absolute_path(request.GET['dest'], form.cleaned_data['dest'])
         filepath = request.fs.join(dest, uploaded_file.name)
 
         if request.fs.isdir(dest) and posixpath.sep in uploaded_file.name:
@@ -1288,7 +1300,7 @@ def _upload_file(request):
 
         response.update({
           'path': filepath,
-          'result': _massage_stats(request, request.fs.stats(filepath)),
+          'result': _massage_stats(request, stat_absolute_path(filepath, request.fs.stats(filepath))),
           'next': request.GET.get("next")
         })
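
The two helpers added above do the heavy lifting for ADLS listings: WebHDFS-style responses carry scheme-less paths, so every stat gets rebased onto the scheme of the path being browsed before it is massaged for the template. A standalone sketch of the same logic (the sample paths are illustrative):

from urlparse import urlparse  # Python 2, as in the patch

def scheme_absolute_path(root, path):
  # Re-attach the root's scheme (e.g. 'adl') when the child path lacks one.
  split_path = urlparse(path)
  split_root = urlparse(root)
  if split_root.scheme and not split_path.scheme:
    path = split_path._replace(scheme=split_root.scheme).geturl()
  return path

# Browsing 'adl:/user/hue': the backend returns a plain '/user/hue/data.csv',
# which must be rebased so the generated links keep pointing at ADLS.
assert scheme_absolute_path('adl:/user/hue', '/user/hue/data.csv') == 'adl:/user/hue/data.csv'
# HDFS paths carry no scheme on either side and pass through untouched.
assert scheme_absolute_path('/user/hue', '/user/hue/data.csv') == '/user/hue/data.csv'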
 

+ 1 - 1
apps/metastore/src/metastore/views.py

@@ -36,6 +36,7 @@ from beeswax.models import SavedQuery
 from beeswax.server import dbms
 from beeswax.server.dbms import get_query_server_config
 from filebrowser.views import location_to_url
+from urlparse import urlparse
 from metadata.conf import has_optimizer, has_navigator, get_optimizer_url, get_navigator_url
 from notebook.connectors.base import Notebook, QueryError
 from notebook.models import make_notebook
@@ -443,7 +444,6 @@ def read_table(request, database, table):
   except Exception, e:
     raise PopupException(_('Cannot read table'), detail=e)
 
-
 @check_has_write_access_permission
 def load_table(request, database, table):
   response = {'status': -1, 'data': 'None'}

+ 1 - 0
apps/useradmin/src/useradmin/models.py

@@ -281,6 +281,7 @@ def update_app_permissions(**kwargs):
            not (new_dp.app == 'hbase' and new_dp.action == 'write') and \
            not (new_dp.app == 'security' and new_dp.action == 'impersonate') and \
            not (new_dp.app == 'filebrowser' and new_dp.action == 's3_access') and \
+           not (new_dp.app == 'filebrowser' and new_dp.action == 'adls_access') and \
            not (new_dp.app == 'oozie' and new_dp.action == 'disable_editor_access'):
           GroupPermission.objects.create(group=default_group, hue_permission=new_dp)
 

+ 1 - 0
desktop/Makefile

@@ -41,6 +41,7 @@ include $(ROOT)/Makefile.vars.priv
 
 APPS := core \
 	libs/aws \
+	libs/azure \
 	libs/hadoop \
 	libs/indexer \
 	libs/liboauth \

+ 23 - 0
desktop/conf.dist/hue.ini

@@ -1435,6 +1435,29 @@
       # e.g. Use boto.s3.connection.OrdinaryCallingFormat for https://s3.amazonaws.com/<bucket-name>
       ## calling_format=boto.s3.connection.OrdinaryCallingFormat
 
+###########################################################################
+# Settings for the Azure lib
+###########################################################################
+[azure]
+  [[azure_accounts]]
+    # Default Azure account
+    [[[default]]]
+      # Azure credentials
+      ## client_id=
+      ## client_secret=
+      ## refresh_url=https://login.microsoftonline.com/<tenant_id>/oauth2/token
+
+      # Endpoint overrides
+      ## proxy_address=
+      ## proxy_port=8080
+      ## proxy_user=
+      ## proxy_pass=
+
+  [[adls_clusters]]
+    # Default ADLS cluster
+    [[[default]]]
+      # fs_defaultfs=adl://<account_name>.azuredatalakestore.net
+      # webhdfs_url=https://<account_name>.azuredatalakestore.net/webhdfs/v1
 
 ###########################################################################
 # Settings for the Sentry lib
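
The client_id/client_secret/refresh_url triple implies the standard OAuth2 client-credentials exchange against Azure Active Directory; the new azure/active_directory.py listed above is where that exchange presumably lives. A rough, hypothetical sketch of such an exchange — not the actual azure.active_directory API, and the 'resource' value is illustrative:

import json
import urllib
import urllib2  # Python 2, matching the codebase

def fetch_adls_token(client_id, client_secret, refresh_url):
  # OAuth2 client-credentials grant; the 'resource' audience depends on the
  # Azure environment being targeted.
  data = urllib.urlencode({
    'grant_type': 'client_credentials',
    'client_id': client_id,
    'client_secret': client_secret,
    'resource': 'https://management.core.windows.net/',
  })
  token = json.loads(urllib2.urlopen(refresh_url, data).read())
  return '%s %s' % (token['token_type'], token['access_token'])  # e.g. "Bearer eyJ..."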

+ 23 - 0
desktop/conf/pseudo-distributed.ini.tmpl

@@ -1434,6 +1434,29 @@
       # e.g. Use boto.s3.connection.OrdinaryCallingFormat for https://s3.amazonaws.com/<bucket-name>
       ## calling_format=boto.s3.connection.OrdinaryCallingFormat
 
+###########################################################################
+# Settings for the Azure lib
+###########################################################################
+[azure]
+  [[azure_accounts]]
+    # Default Azure account
+    [[[default]]]
+      # Azure credentials
+      ## client_id=
+      ## client_secret=
+      ## refresh_url=https://login.microsoftonline.com/<tenant_id>/oauth2/token
+
+      # Endpoint overrides
+      ## proxy_address=
+      ## proxy_port=8080
+      ## proxy_user=
+      ## proxy_pass=
+
+  [[adls_clusters]]
+    # Default ADLS cluster
+    [[[default]]]
+      # fs_defaultfs=adl://<account_name>.azuredatalakestore.net
+      # webhdfs_url=https://<account_name>.azuredatalakestore.net/webhdfs/v1
 
 ###########################################################################
 # Settings for the Sentry lib

+ 3 - 1
desktop/core/src/desktop/lib/fs/__init__.py

@@ -24,8 +24,10 @@ from desktop.lib.fs.proxyfs import ProxyFS
 
 def splitpath(path):
   split = urlparse.urlparse(path)
-  if split.scheme:
+  if split.scheme and split.netloc:
     parts = [split.scheme + '://', split.netloc] + split.path.split('/')
+  elif split.scheme:
+    parts = [split.scheme + ':/'] + split.path.split('/')
   else:
     parts = ['/'] + posixpath.normpath(path).split('/')
   # Filter empty parts out
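
The new elif covers single-slash schemes such as adl:/, where urlparse yields a scheme but an empty netloc, unlike s3a:// URLs. A worked example of all three branches:

import posixpath
import urlparse  # Python 2 module, as used by this file

def splitpath(path):
  split = urlparse.urlparse(path)
  if split.scheme and split.netloc:    # double-slash URLs, e.g. s3a://bucket/key
    parts = [split.scheme + '://', split.netloc] + split.path.split('/')
  elif split.scheme:                   # single-slash URLs, e.g. adl:/user/hue
    parts = [split.scheme + ':/'] + split.path.split('/')
  else:                                # plain HDFS paths
    parts = ['/'] + posixpath.normpath(path).split('/')
  return [part for part in parts if part]  # filter empty parts out

assert splitpath('s3a://bucket/key') == ['s3a://', 'bucket', 'key']
assert splitpath('adl:/user/hue') == ['adl:/', 'user', 'hue']
assert splitpath('/user/hue') == ['/', 'user', 'hue']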

+ 6 - 0
desktop/core/src/desktop/lib/fs/proxyfs.py

@@ -126,6 +126,9 @@ class ProxyFS(object):
   def normpath(self, path):
     return self._get_fs(path).normpath(path)
 
+  def netnormpath(self, path):
+    return self._get_fs(path).netnormpath(path)
+
   def open(self, path, *args, **kwargs):
     return self._get_fs(path).open(path, *args, **kwargs)
 
@@ -232,3 +235,6 @@ class ProxyFS(object):
 
   def check_access(self, path, *args, **kwargs):
     self._get_fs(path).check_access(path, *args, **kwargs)
+
+  def mkswap(self, filename, subdir='', suffix='swp', basedir=None):
+    return self._get_fs(basedir).mkswap(filename, subdir, suffix, basedir)
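
Both additions follow the class's one pattern: resolve the owning filesystem from the path's scheme and delegate. Note that mkswap is routed by basedir, since the temporary swap file has to be created on the same filesystem as the upload destination. A simplified, hypothetical model of that dispatch (the real class resolves filesystems through fsmanager):

class MiniProxyFS(object):
  def __init__(self, fs_dict, default_scheme='hdfs'):
    self.fs_dict = fs_dict              # e.g. {'hdfs': hdfs, 'adl': adls, 's3a': s3}
    self.default_scheme = default_scheme

  def _get_scheme(self, path):
    idx = path.find(':/')
    return path[:idx].lower() if idx > 0 else self.default_scheme

  def _get_fs(self, path):
    return self.fs_dict[self._get_scheme(path)]

  def netnormpath(self, path):
    return self._get_fs(path).netnormpath(path)

  def mkswap(self, filename, subdir='', suffix='swp', basedir=None):
    # basedir, not filename, decides the filesystem: the swap file must live
    # next to the final upload target.
    return self._get_fs(basedir).mkswap(filename, subdir, suffix, basedir)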

+ 4 - 1
desktop/core/src/desktop/lib/fsmanager.py

@@ -21,6 +21,8 @@ import sys
 import logging
 
 import aws
+import azure.client
+from azure.conf import is_adls_enabled
 from aws.conf import is_enabled as is_s3_enabled
 
 from desktop.lib.fs import ProxyFS
@@ -32,7 +34,8 @@ DEFAULT_SCHEMA = 'hdfs'
 
 FS_GETTERS = {
   "hdfs": cluster.get_hdfs,
-  "s3a": aws.get_s3fs if is_s3_enabled() else None
+  "s3a": aws.get_s3fs if is_s3_enabled() else None,
+  "adl": azure.client.get_client if is_adls_enabled() else None
 }
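
Registering 'adl' in FS_GETTERS is all it takes for scheme-based lookups to reach the Azure client; disabled filesystems map to None so a lookup can fail fast. A hypothetical consumer of the map above (the real getters may take arguments such as a cluster identifier):

def get_filesystem_for_scheme(scheme):
  getter = FS_GETTERS.get(scheme)  # FS_GETTERS as defined in the hunk above
  if getter is None:
    raise ValueError('Filesystem %r is unknown or not enabled' % scheme)
  return getter()  # instantiate or fetch the client lazily

# 'adl:/...' paths now resolve through azure.client.get_client, provided
# is_adls_enabled() was True when this module was imported.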
 
 

+ 2 - 2
desktop/core/src/desktop/lib/rest/resource.py

@@ -131,7 +131,7 @@ class Resource(object):
                        allow_redirects=allow_redirects, clear_cookies=clear_cookies)
 
 
-  def put(self, relpath=None, params=None, data=None, contenttype=None, allow_redirects=False, clear_cookies=False):
+  def put(self, relpath=None, params=None, data=None, contenttype=None, allow_redirects=False, clear_cookies=False, headers=None):
     """
     Invoke the PUT method on a resource.
     @param relpath: Optional. A relative path to this resource's path.
@@ -143,7 +143,7 @@ class Resource(object):
 
     @return: A dictionary of the JSON result.
     """
-    return self.invoke("PUT", relpath, params, data, headers=self._make_headers(contenttype),
+    return self.invoke("PUT", relpath, params, data, headers=self._make_headers(contenttype, headers),
                        allow_redirects=allow_redirects, clear_cookies=clear_cookies)
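
Threading an optional headers dict through put() and into _make_headers() lets a caller attach per-request headers; the ADLS WebHDFS client added by this commit presumably uses it for the OAuth bearer token. A hypothetical caller-side sketch (endpoint, token and payload are illustrative placeholders):

from desktop.lib.rest.http_client import HttpClient
from desktop.lib.rest.resource import Resource

client = HttpClient('https://myaccount.azuredatalakestore.net')
res = Resource(client, relpath='/webhdfs/v1')
access_token = 'eyJ...'  # obtained from Azure AD beforehand

res.put('/user/hue/data.csv',
        params={'op': 'CREATE', 'overwrite': 'true'},
        data='col_a,col_b\n1,2\n',
        contenttype='application/octet-stream',
        headers={'Authorization': 'Bearer ' + access_token})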
 
 

+ 10 - 0
desktop/core/src/desktop/models.py

@@ -42,6 +42,7 @@ from django.utils.translation import ugettext as _, ugettext_lazy as _t
 from settings import HUE_DESKTOP_VERSION
 
 from aws.conf import is_enabled as is_s3_enabled, has_s3_access
+from azure.conf import is_adls_enabled, has_adls_access
 from dashboard.conf import get_engines
 from notebook.conf import SHOW_NOTEBOOKS, get_ordered_interpreters
 
@@ -1720,6 +1721,15 @@ class ClusterConfig():
         'page': '/filebrowser/view=S3A://'
       })
 
+    if is_adls_enabled() and has_adls_access(self.user):
+      interpreters.append({
+        'type': 'adls',
+        'displayName': _('ADLS'),
+        'buttonName': _('Browse'),
+        'tooltip': _('ADLS'),
+        'page': '/filebrowser/view=adl:/'
+      })
+
     if 'metastore' in self.apps:
       interpreters.append({
         'type': 'tables',

+ 64 - 2
desktop/core/src/desktop/static/desktop/js/apiHelper.js

@@ -56,7 +56,8 @@ var ApiHelper = (function () {
   var DOCUMENTS_API = "/desktop/api2/doc/";
   var DOCUMENTS_SEARCH_API = "/desktop/api2/docs/";
   var FETCH_CONFIG = '/desktop/api2/get_config/';
-  var HDFS_API_PREFIX = "/filebrowser/view=";
+  var HDFS_API_PREFIX = "/filebrowser/view=/";
+  var ADLS_API_PREFIX = "/filebrowser/view=adl:/";
   var GIT_API_PREFIX = "/desktop/api/vcs/contents/";
   var S3_API_PREFIX = "/filebrowser/view=S3A://";
   var IMPALA_INVALIDATE_API = '/impala/api/invalidate';
@@ -115,6 +116,10 @@ var ApiHelper = (function () {
       $.totalStorage(self.getAssistCacheIdentifier({ sourceType: 'hdfs' }), {});
     });
 
+    huePubSub.subscribe('assist.clear.adls.cache', function () {
+      $.totalStorage(self.getAssistCacheIdentifier({ sourceType: 'adls' }), {});
+    });
+
     huePubSub.subscribe('assist.clear.git.cache', function () {
       $.totalStorage(self.getAssistCacheIdentifier({ sourceType: 'git' }), {});
     });
@@ -145,6 +150,7 @@ var ApiHelper = (function () {
         clearAll: true
       });
       $.totalStorage(self.getAssistCacheIdentifier({ sourceType: 'hdfs' }), {});
+      $.totalStorage(self.getAssistCacheIdentifier({ sourceType: 'adls' }), {});
       $.totalStorage(self.getAssistCacheIdentifier({ sourceType: 'git' }), {});
       $.totalStorage(self.getAssistCacheIdentifier({ sourceType: 's3' }), {});
       $.totalStorage(self.getAssistCacheIdentifier({ sourceType: 'collections' }), {});
@@ -391,7 +397,8 @@ var ApiHelper = (function () {
    */
   ApiHelper.prototype.fetchHdfsPath = function (options) {
     var self = this;
-    var url = HDFS_API_PREFIX + "/" + options.pathParts.join("/") + '?format=json&sortby=name&descending=false&pagesize=' + (options.pageSize || 500) + '&pagenum=' + (options.page || 1);
+    options.pathParts.shift();
+    var url = HDFS_API_PREFIX + options.pathParts.join("/") + '?format=json&sortby=name&descending=false&pagesize=' + (options.pageSize || 500) + '&pagenum=' + (options.page || 1);
     if (options.filter) {
       url += '&filter=' + options.filter;
     }
@@ -430,6 +437,61 @@ var ApiHelper = (function () {
     }));
   };
 
+  /**
+   * @param {Object} options
+   * @param {Function} options.successCallback
+   * @param {Function} [options.errorCallback]
+   * @param {boolean} [options.silenceErrors]
+   * @param {Number} [options.timeout]
+   * @param {Object} [options.editor] - Ace editor
+   *
+   * @param {string[]} options.pathParts
+   * @param {number} [options.pageSize] - Default 500
+   * @param {number} [options.page] - Default 1
+   * @param {string} [options.filter]
+   */
+  ApiHelper.prototype.fetchAdlsPath = function (options) {
+    var self = this;
+    options.pathParts.shift();
+    var url = ADLS_API_PREFIX + options.pathParts.join("/") + '?format=json&sortby=name&descending=false&pagesize=' + (options.pageSize || 500) + '&pagenum=' + (options.page || 1);
+    if (options.filter) {
+      url += '&filter=' + options.filter;
+    }
+    var fetchFunction = function (storeInCache) {
+      if (options.timeout === 0) {
+        self.assistErrorCallback(options)({ status: -1 });
+        return;
+      }
+      return $.ajax({
+        dataType: "json",
+        url: url,
+        timeout: options.timeout,
+        success: function (data) {
+          if (!data.error && !self.successResponseIsError(data) && typeof data.files !== 'undefined' && data.files !== null) {
+            if (data.files.length > 2 && !options.filter) {
+              storeInCache(data);
+            }
+            options.successCallback(data);
+          } else {
+            self.assistErrorCallback(options)(data);
+          }
+        }
+      })
+      .fail(self.assistErrorCallback(options))
+      .always(function () {
+        if (typeof options.editor !== 'undefined' && options.editor !== null) {
+          options.editor.hideSpinner();
+        }
+      });
+    };
+
+    return fetchCached.bind(self)($.extend({}, options, {
+      sourceType: 'adls',
+      url: url,
+      fetchFunction: fetchFunction
+    }));
+  };
+
   /**
    * @param {Object} options
    * @param {Function} options.successCallback

+ 258 - 0
desktop/core/src/desktop/static/desktop/js/assist/assistAdlsEntry.js

@@ -0,0 +1,258 @@
+// Licensed to Cloudera, Inc. under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  Cloudera, Inc. licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+var AssistAdlsEntry = (function () {
+
+  var PAGE_SIZE = 100;
+
+  /**
+   * @param {object} options
+   * @param {object} options.definition
+   * @param {string} options.definition.name
+   * @param {string} options.definition.type (file, dir)
+   * @param {AssistAdlsEntry} options.parent
+   * @param {ApiHelper} options.apiHelper
+   * @constructor
+   */
+  function AssistAdlsEntry (options) {
+    var self = this;
+
+    self.definition = options.definition;
+    self.apiHelper = options.apiHelper;
+    self.parent = options.parent;
+    self.path = '';
+    if (self.parent !== null) {
+      self.path = self.parent.path;
+      if (self.parent.path !== '/') {
+        self.path += '/'
+      }
+    }
+    self.path += self.definition.name;
+    self.currentPage = 1;
+    self.hasMorePages = true;
+
+    self.isFilterVisible = ko.observable(false);
+    self.editingSearch = ko.observable(false);
+    self.filter = ko.observable('').extend({ rateLimit: 400 });
+
+    self.isFilterVisible.subscribe(function (newValue) {
+      if (!newValue && self.filter()) {
+        self.filter('');
+      }
+    });
+
+    self.filter.subscribe(function () {
+      self.loadEntries();
+    });
+
+    self.entries = ko.observableArray([]);
+
+    self.loaded = false;
+    self.loading = ko.observable(false);
+    self.loadingMore = ko.observable(false);
+    self.hasErrors = ko.observable(false);
+    self.open = ko.observable(false);
+
+    self.open.subscribe(function(newValue) {
+      if (newValue && self.entries().length == 0) {
+        self.loadEntries();
+      }
+    });
+
+    self.hasEntries = ko.computed(function() {
+      return self.entries().length > 0;
+    });
+  }
+
+  AssistAdlsEntry.prototype.dblClick = function () {
+    var self = this;
+    huePubSub.publish('assist.dblClickAdlsItem', self);
+  };
+
+  AssistAdlsEntry.prototype.toggleSearch = function () {
+    var self = this;
+    self.isFilterVisible(!self.isFilterVisible());
+    self.editingSearch(self.isFilterVisible());
+  };
+
+  AssistAdlsEntry.prototype.loadEntries = function(callback) {
+    var self = this;
+    if (self.loading()) {
+      return;
+    }
+    self.loading(true);
+    self.hasErrors(false);
+
+    var successCallback = function(data) {
+      self.hasMorePages = data.page.next_page_number > self.currentPage;
+      var filteredFiles = $.grep(data.files, function (file) {
+        return file.name !== '.' && file.name !== '..';
+      });
+      self.entries($.map(filteredFiles, function (file) {
+        return new AssistAdlsEntry({
+          definition: file,
+          parent: self,
+          apiHelper: self.apiHelper
+        })
+      }));
+      self.loaded = true;
+      self.loading(false);
+      if (callback) {
+        callback();
+      }
+    };
+
+    var errorCallback = function () {
+      self.hasErrors(true);
+      self.loading(false);
+      if (callback) {
+        callback();
+      }
+    };
+
+    self.apiHelper.fetchAdlsPath({
+      pageSize: PAGE_SIZE,
+      page: self.currentPage,
+      filter: self.isFilterVisible() && self.filter().trim() ? self.filter() : undefined,
+      pathParts: self.getHierarchy(),
+      successCallback: successCallback,
+      errorCallback: errorCallback
+    })
+  };
+
+  AssistAdlsEntry.prototype.goHome = function () {
+    huePubSub.publish('assist.adls.go.home');
+  };
+
+  AssistAdlsEntry.prototype.loadDeep = function(folders, callback) {
+    var self = this;
+
+    if (folders.length === 0) {
+      callback(self);
+      return;
+    }
+
+    var nextName = folders.shift();
+    var loadedPages = 0;
+    var findNextAndLoadDeep = function () {
+
+      var foundEntry = $.grep(self.entries(), function (entry) {
+        return entry.definition.name === nextName && entry.definition.type === 'dir';
+      });
+      var passedAlphabetically = self.entries().length > 0 && self.entries()[self.entries().length - 1].definition.name.localeCompare(nextName) > 0;
+
+      if (foundEntry.length === 1) {
+        foundEntry[0].loadDeep(folders, callback);
+      } else if (!passedAlphabetically && self.hasMorePages && loadedPages < 50) {
+        loadedPages++;
+        self.fetchMore(function () {
+          findNextAndLoadDeep();
+        }, function () {
+          callback(self);
+        });
+      } else {
+        callback(self);
+      }
+    };
+
+    if (! self.loaded) {
+      self.loadEntries(findNextAndLoadDeep);
+    } else {
+      findNextAndLoadDeep();
+    }
+  };
+
+  AssistAdlsEntry.prototype.getHierarchy = function () {
+    var self = this;
+    var parts = [];
+    var entry = self;
+    while (entry != null) {
+      parts.push(entry.definition.name);
+      entry = entry.parent;
+    }
+    parts.reverse();
+    return parts;
+  };
+
+  AssistAdlsEntry.prototype.toggleOpen = function (data, event) {
+    var self = this;
+    if (self.definition.type === 'file') {
+      if (IS_HUE_4) {
+        if (event.ctrlKey || event.metaKey || event.which === 2) {
+          window.open('/hue' + self.definition.url, '_blank');
+        } else {
+          huePubSub.publish('open.link', self.definition.url);
+        }
+      } else {
+        window.open(self.definition.url, '_blank');
+      }
+      return;
+    }
+    self.open(!self.open());
+    if (self.definition.name === '..') {
+      if (self.parent.parent) {
+        huePubSub.publish('assist.selectAdlsEntry', self.parent.parent);
+      }
+    } else {
+      huePubSub.publish('assist.selectAdlsEntry', self);
+    }
+  };
+
+  AssistAdlsEntry.prototype.fetchMore = function (successCallback, errorCallback) {
+    var self = this;
+    if (!self.hasMorePages || self.loadingMore()) {
+      return;
+    }
+    self.currentPage++;
+    self.loadingMore(true);
+    self.hasErrors(false);
+    self.apiHelper.fetchAdlsPath({
+      pageSize: PAGE_SIZE,
+      page: self.currentPage,
+      filter: self.isFilterVisible() && self.filter().trim() ? self.filter() : undefined,
+      pathParts: self.getHierarchy(),
+      successCallback: function (data) {
+        self.hasMorePages = data.page.next_page_number > self.currentPage;
+        var filteredFiles = $.grep(data.files, function (file) {
+          return file.name !== '.' && file.name !== '..';
+        });
+        self.entries(self.entries().concat($.map(filteredFiles, function (file) {
+          return new AssistAdlsEntry({
+            definition: file,
+            parent: self,
+            apiHelper: self.apiHelper
+          });
+        })));
+        self.loadingMore(false);
+        if (successCallback) {
+          successCallback();
+        }
+      },
+      errorCallback: function () {
+        self.hasErrors(true);
+        if (errorCallback) {
+          errorCallback();
+        }
+      }
+    });
+  };
+
+  AssistAdlsEntry.prototype.openInImporter = function () {
+    huePubSub.publish('open.in.importer', this.definition.path);
+  };
+
+  return AssistAdlsEntry;
+})();

+ 0 - 1
desktop/core/src/desktop/static/desktop/js/assist/assistHdfsEntry.js

@@ -139,7 +139,6 @@ var AssistHdfsEntry = (function () {
 
   AssistHdfsEntry.prototype.loadDeep = function(folders, callback) {
     var self = this;
-
     if (folders.length === 0) {
       callback(self);
       return;

+ 79 - 21
desktop/core/src/desktop/static/desktop/js/jquery.filechooser.js

@@ -46,6 +46,64 @@
         HOME: "Home"
       },
       filesystems: ['hdfs'],
+      filesysteminfo: {
+        "": {
+          scheme: "",
+          root: "/",
+          home: "/?default_to_home",
+          icon: {
+            brand: "fa-files-o",
+            home: "fa-home",
+          },
+          label : {
+            home: "home",
+            name: "HDFS",
+          }
+        },
+        hdfs: {
+          scheme: "",
+          root: "/",
+          home: "/?default_to_home",
+          icon: {
+            brand: "fa-files-o",
+            home: "fa-home",
+          },
+          label : {
+            home: "home",
+            name: "HDFS",
+          }
+        },
+        s3a: {
+          scheme: "s3a",
+          root: "s3a://",
+          home: "s3a://",
+          icon: {
+            brand: "fa-cubes",
+            home: "fa-cubes",
+          },
+          label : {
+            home: "",
+            name: "S3"
+          }
+        },
+        adl: {
+          scheme: "adl",
+          root: "adl:/",
+          home: "adl:/",
+          icon: {
+            svg:{
+              brand: "#hi-adls",
+              home: "#hi-adls"
+            },
+            brand: "fa-windows",
+            home: "fa-windows"
+          },
+          label : {
+            home: "",
+            name: "ADLS"
+          }
+        }
+      },
       fsSelected: 'hdfs',
       user: "",
       onNavigate: function () {
@@ -100,8 +158,9 @@
     }
 
     var initialPath = $.trim(self.options.initialPath);
-    if (initialPath && initialPath.toLowerCase().indexOf('s3a') > -1 && $(self.element).data('fs').indexOf('s3a') > -1) {
-      self.options.fsSelected = 's3a';
+    var scheme = initialPath && initialPath.substring(0,initialPath.indexOf(":"));
+    if (scheme && scheme.length) {
+      self.options.fsSelected = scheme;
     }
 
     $(self.element).find('.filechooser-services li').removeClass('active');
@@ -137,22 +196,21 @@
     if (self.options.filesystems.length > 1) {
       var $ul = $('<ul>').addClass('nav nav-list').css('border', 'none');
       self.options.filesystems.forEach(function (fs) {
-        var $li = $('<li>').attr('data-fs', fs).addClass(self.options.fsSelected === fs ? 'active' : '').html('<a class="pointer" style="padding-left: 6px">' + (fs.toUpperCase() == 'S3A' ? 'S3' : fs.toUpperCase()) + '</a>');
+        var filesysteminfo = self.options.filesysteminfo;
+        var $li = $('<li>').attr('data-fs', fs).addClass(self.options.fsSelected === fs ? 'active' : '').html('<a class="pointer" style="padding-left: 6px">' + (filesysteminfo[fs] ? filesysteminfo[fs].label.name : fs.toUpperCase()) + '</a>');
         $li.on('click', function () {
           $(this).siblings().removeClass('active');
           $(this).addClass('active');
           self.options.fsSelected = fs;
           var storedPath = $.totalStorage(STORAGE_PREFIX + self.options.user + self.options.fsSelected);
           if (storedPath !== null) {
-            if (fs === 's3a' && storedPath.toLowerCase().indexOf('s3a') === -1) {
-              self.navigateTo('S3A://');
-            } else if (fs !== 's3a' && storedPath.toLowerCase().indexOf('s3a') > -1) {
-              self.navigateTo('/?default_to_home');
+            if (filesysteminfo[fs] && storedPath.toLowerCase().indexOf(fs) === -1) {
+              self.navigateTo(filesysteminfo[fs].home);
             } else {
-              self.navigateTo(storedPath)
+              self.navigateTo(storedPath);
             }
           } else {
-            self.navigateTo(fs === 's3a' ? 'S3A://' : '/?default_to_home');
+            self.navigateTo(filesysteminfo[fs] ? filesysteminfo[fs].home : '/?default_to_home');
           }
         });
         $li.appendTo($ul);
@@ -163,6 +221,7 @@
     }
   };
 
+  //TODO: refactor this method to template
   Plugin.prototype.navigateTo = function (path) {
     var _parent = this;
     $(_parent.element).find('.filechooser-tree').html("<i style=\"font-size: 24px; color: #DDD\" class=\"fa fa-spinner fa-spin\"></i>");
@@ -185,16 +244,13 @@
         'white-space': 'nowrap'
       });
       var _home = $("<li>");
-      var _homelink = $("<a>").addClass("nounderline").html('<i class="fa fa-home"></i> ' + _parent.options.labels.HOME).css("cursor", "pointer").click(function () {
-        _parent.navigateTo("/?default_to_home");
+      //var filesysteminfo = self.options.filesysteminfo;
+      var fs = _parent.options.filesysteminfo[_parent.options.fsSelected || "hdfs"];
+      var el = fs.icon.svg ? '<svg class="hi"><use xlink:href="'+fs.icon.svg.home+'"></use></svg>' : '<i class="fa '+fs.icon.home+'"></i> ' + fs.label.home;
+      var _homelink = $("<a>").addClass("nounderline").html(el).css("cursor", "pointer").click(function () {
+        _parent.navigateTo(fs.home);
       });
 
-      if (_parent.options.fsSelected === 's3a') {
-        _homelink = $("<a>").addClass("nounderline muted").html('<i class="fa fa-cubes"></i> ').css("cursor", "pointer").click(function () {
-          _parent.navigateTo("S3A://");
-        });
-      }
-
       _homelink.appendTo(_home);
       _home.appendTo($homeBreadcrumb);
 
@@ -529,7 +585,8 @@
         _parent.options.onError();
       }
       if (e.status === 404 || e.status === 500) {
-        _parent.navigateTo(_parent.options.errorRedirectPath != "" ? _parent.options.errorRedirectPath : (_parent.options.fsSelected === 's3a' ? 'S3A://' : '/?default_to_home'));
+        var fs = _parent.options.filesysteminfo[_parent.options.fsSelected || "hdfs"];
+        _parent.navigateTo(_parent.options.errorRedirectPath !== "" ? _parent.options.errorRedirectPath : fs.home);
       } else {
         console.error(e);
         $(document).trigger("error", e.statusText);
@@ -579,9 +636,10 @@
     $(self.element).empty().html('<div class="filechooser-container" style="position: relative"><div class="filechooser-services" style="position: absolute"></div><div class="filechooser-tree" style="width: 560px"></div></div>');
     $.post('/filebrowser/api/get_filesystems', function (data) {
       var initialPath = $.trim(self.options.initialPath);
-      if (data && data.status == 0) {
-        if (initialPath && initialPath.toLowerCase().indexOf('s3a') > -1 && data.filesystems['s3a']) {
-          self.options.fsSelected = 's3a';
+      var scheme = initialPath && initialPath.substring(0,initialPath.indexOf(":"));
+      if (data && data.status === 0) {
+        if (scheme && scheme.length && data.filesystems[scheme]) {
+          self.options.fsSelected = scheme;
         }
         self.setFileSystems(data.filesystems);
       }

+ 11 - 1
desktop/core/src/desktop/static/desktop/js/ko.hue-bindings.js

@@ -4662,6 +4662,16 @@
         dblClickHdfsItemSub.remove();
       });
 
+      var dblClickAdlsItemSub = huePubSub.subscribe("assist.dblClickAdlsItem", function(assistHdfsEntry) {
+        if ($el.data("last-active-editor")) {
+          editor.session.insert(editor.getCursorPosition(), "adl:/" + assistHdfsEntry.path + "'");
+        }
+      });
+
+      disposeFunctions.push(function () {
+        dblClickAdlsItemSub.remove();
+      });
+
 
       var dblClickGitItemSub = huePubSub.subscribe("assist.dblClickGitItem", function(assistGitEntry) {
         if ($el.data("last-active-editor")) {
@@ -4767,7 +4777,7 @@
         drop: function (e, ui) {
           var position = editor.renderer.screenToTextCoordinates(e.clientX, e.clientY);
           var text = ui.helper.text();
-          if (lastMeta.type === 's3' || lastMeta.type === 'hdfs'){
+          if (lastMeta.type === 's3' || lastMeta.type === 'hdfs' || lastMeta.type === 'adls'){
             text = "'" + lastMeta.definition.path + "'";
           }
           editor.moveCursorToPosition(position);

+ 141 - 2
desktop/core/src/desktop/templates/assist.mako

@@ -34,6 +34,7 @@ from notebook.conf import ENABLE_QUERY_BUILDER, ENABLE_QUERY_SCHEDULING, get_ord
 <script src="${ static('desktop/js/assist/assistDbEntry.js') }"></script>
 <script src="${ static('desktop/js/assist/assistDbSource.js') }"></script>
 <script src="${ static('desktop/js/assist/assistHdfsEntry.js') }"></script>
+<script src="${ static('desktop/js/assist/assistAdlsEntry.js') }"></script>
 <script src="${ static('desktop/js/assist/assistGitEntry.js') }"></script>
 <script src="${ static('desktop/js/assist/assistS3Entry.js') }"></script>
 <script src="${ static('desktop/js/assist/assistCollectionEntry.js') }"></script>
@@ -470,6 +471,22 @@ from notebook.conf import ENABLE_QUERY_BUILDER, ENABLE_QUERY_SCHEDULING, get_ord
     </div>
   </script>
 
+  <script type="text/html" id="assist-adls-header-actions">
+    <div class="assist-db-header-actions">
+      <a class="inactive-action" href="javascript:void(0)" data-bind="click: goHome" title="Go to ${ home_dir }"><i class="pointer fa fa-home"></i></a>
+      <a class="inactive-action" href="javascript:void(0)" data-bind="click: toggleSearch, css: { 'blue': isFilterVisible }" title="Filter"><i class="pointer fa fa-filter"></i></a>
+      <a class="inactive-action" data-bind="dropzone: {
+            url: '/filebrowser/upload/file?dest=adl:/' + path,
+            params: { dest: path },
+            paramName: 'hdfs_file',
+            onError: function(x, e){ $(document).trigger('error', e); },
+            onComplete: function () { huePubSub.publish('assist.adls.refresh'); } }" title="${_('Upload file')}" href="javascript:void(0)">
+        <div class="dz-message inline" data-dz-message><i class="pointer fa fa-plus" title="${_('Upload file')}"></i></div>
+      </a>
+      <a class="inactive-action" href="javascript:void(0)" data-bind="click: function () { huePubSub.publish('assist.adls.refresh'); }" title="${_('Manual refresh')}"><i class="pointer fa fa-refresh" data-bind="css: { 'fa-spin blue' : loading }"></i></a>
+    </div>
+  </script>
+
   <script type="text/html" id="assist-hdfs-inner-panel">
     <!-- ko with: selectedHdfsEntry -->
     <div class="assist-flex-header assist-breadcrumb" >
@@ -527,6 +544,63 @@ from notebook.conf import ENABLE_QUERY_BUILDER, ENABLE_QUERY_SCHEDULING, get_ord
     <!-- /ko -->
   </script>
 
+  <script type="text/html" id="assist-adls-inner-panel">
+    <!-- ko with: selectedAdlsEntry -->
+    <div class="assist-flex-header assist-breadcrumb" >
+      <!-- ko if: parent !== null -->
+      <a href="javascript: void(0);" data-bind="appAwareTemplateContextMenu: { template: 'hdfs-context-items', scrollContainer: '.assist-adls-scrollable' }, click: function () { huePubSub.publish('assist.selectAdlsEntry', parent); }">
+        <i class="fa fa-fw fa-chevron-left"></i>
+        <i class="fa fa-fw fa-folder-o"></i>
+        <span data-bind="text: definition.name, tooltip: {'title': path, 'placement': 'top' }"></span>
+      </a>
+      <!-- /ko -->
+      <!-- ko if: parent === null -->
+      <div>
+        <i class="fa fa-fw fa-folder-o"></i>
+        <span data-bind="text: path"></span>
+      </div>
+      <!-- /ko -->
+      <!-- ko template: 'assist-adls-header-actions' --><!-- /ko -->
+    </div>
+    <div class="assist-flex-hdfs-search" data-bind="visible: isFilterVisible()">
+      <div class="assist-filter"><input class="clearable" type="text" placeholder="${ _('Filter...') }" data-bind="clearable: filter, hasFocus: editingSearch, value: filter, valueUpdate: 'afterkeydown'"/></div>
+    </div>
+    <div class="assist-flex-fill assist-adls-scrollable">
+      <div data-bind="visible: ! loading() && ! hasErrors()" style="position: relative;">
+        <!-- ko hueSpinner: { spin: loadingMore, overlay: true } --><!-- /ko -->
+        <ul class="assist-tables" data-bind="foreachVisible: { data: entries, minHeight: 22, container: '.assist-adls-scrollable', fetchMore: $data.fetchMore.bind($data) }">
+          <li class="assist-entry assist-table-link" style="position: relative;" data-bind="appAwareTemplateContextMenu: { template: 'hdfs-context-items', scrollContainer: '.assist-adls-scrollable' }, visibleOnHover: { 'selector': '.assist-actions' }">
+            <div class="assist-actions table-actions" style="opacity: 0;" >
+              <a style="padding: 0 3px;" class="inactive-action" href="javascript:void(0);" data-bind="templatePopover : { contentTemplate: 'hdfs-details-content', titleTemplate: 'hdfs-details-title', minWidth: '320px' }">
+                <i class='fa fa-info' title="${ _('Details') }"></i>
+              </a>
+            </div>
+
+            <a href="javascript:void(0)" class="assist-entry assist-table-link" data-bind="multiClick: { click: toggleOpen, dblClick: dblClick }, attr: {'title': definition.name }">
+              <!-- ko if: definition.type === 'dir' -->
+              <i class="fa fa-fw fa-folder-o muted valign-middle"></i>
+              <!-- /ko -->
+              <!-- ko if: definition.type === 'file' -->
+              <i class="fa fa-fw fa-file-o muted valign-middle"></i>
+              <!-- /ko -->
+              <span draggable="true" data-bind="text: definition.name, draggableText: { text: '\'' + path + '\'', meta: {'type': 'adls', 'definition': definition} }"></span>
+            </a>
+          </li>
+        </ul>
+        <!-- ko if: !loading() && entries().length === 0 -->
+        <ul class="assist-tables">
+          <li class="assist-entry"><span class="assist-no-entries"><!-- ko if: filter() -->${_('No results found')}<!-- /ko --><!-- ko ifnot: filter() -->${_('Empty directory')}<!-- /ko --></span></li>
+        </ul>
+        <!-- /ko -->
+      </div>
+      <!-- ko hueSpinner: { spin: loading, center: true, size: 'large' } --><!-- /ko -->
+      <div class="assist-errors" data-bind="visible: ! loading() && hasErrors()">
+        <span>${ _('Error loading contents.') }</span>
+      </div>
+    </div>
+    <!-- /ko -->
+  </script>
+
   <script type="text/html" id="assist-document-header-actions">
     <div class="assist-db-header-actions">
       <!-- ko if: !loading() && availableTypeFilters().length > 1 -->
@@ -895,8 +969,8 @@ from notebook.conf import ENABLE_QUERY_BUILDER, ENABLE_QUERY_SCHEDULING, get_ord
       <div class="assist-panel-switches">
         <!-- ko foreach: availablePanels -->
         <div class="inactive-action assist-type-switch" data-bind="click: function () { $parent.visiblePanel($data); }, css: { 'blue': $parent.visiblePanel() === $data }, style: { 'float': rightAlignIcon ? 'right' : 'left' },  attr: { 'title': name }">
-          <!-- ko if: type === 'documents' --><span style="font-size:22px;"><svg class="hi"><use xlink:href="#hi-documents"></use></svg></span><!-- /ko -->
-          <!-- ko if: type !== 'documents' --><i class="fa fa-fw valign-middle" data-bind="css: icon"></i><!-- /ko -->
+          <!-- ko if: iconSvg --><span style="font-size:22px;"><svg class="hi"><use data-bind="attr: {'xlink:href': iconSvg }" xlink:href=''></use></svg></span><!-- /ko -->
+          <!-- ko if: !iconSvg --><i class="fa fa-fw valign-middle" data-bind="css: icon"></i><!-- /ko -->
         </div>
         <!-- /ko -->
       </div>
@@ -962,6 +1036,7 @@ from notebook.conf import ENABLE_QUERY_BUILDER, ENABLE_QUERY_SCHEDULING, get_ord
         self.name = options.name;
         self.panelData = options.panelData;
         self.rightAlignIcon = !!options.rightAlignIcon;
+        self.iconSvg = options.iconSvg;
 
         self.visible = ko.observable(options.visible || true);
         options.apiHelper.withTotalStorage('assist', 'showingPanel_' + self.type, self.visible, false, options.visible);
@@ -1315,6 +1390,55 @@ from notebook.conf import ENABLE_QUERY_BUILDER, ENABLE_QUERY_SCHEDULING, get_ord
         this.reload();
       };
 
+      function AssistAdlsPanel (options) {
+        var self = this;
+        self.apiHelper = options.apiHelper;
+
+        self.selectedAdlsEntry = ko.observable();
+
+        var loadPath = function (path) {
+          var parts = path.split('/');
+          parts.shift();
+
+          var currentEntry = new AssistAdlsEntry({
+            definition: {
+              name: '/',
+              type: 'dir'
+            },
+            parent: null,
+            apiHelper: self.apiHelper
+          });
+
+          currentEntry.loadDeep(parts, function (entry) {
+            self.selectedAdlsEntry(entry);
+            entry.open(true);
+          });
+        };
+
+        self.reload = function () {
+          loadPath(self.apiHelper.getFromTotalStorage('assist', 'currentAdlsPath', '/'));
+        };
+
+        huePubSub.subscribe('assist.adls.go.home', function () {
+          loadPath('${ home_dir }');
+          self.apiHelper.setInTotalStorage('assist', 'currentAdlsPath', '${ home_dir }');
+        });
+
+        huePubSub.subscribe('assist.selectAdlsEntry', function (entry) {
+          self.selectedAdlsEntry(entry);
+          self.apiHelper.setInTotalStorage('assist', 'currentAdlsPath', entry.path);
+        });
+
+        huePubSub.subscribe('assist.adls.refresh', function () {
+          huePubSub.publish('assist.clear.adls.cache');
+          self.reload();
+        });
+      }
+
+      AssistAdlsPanel.prototype.init = function () {
+        this.reload();
+      };
+
       /**
        * @param {Object} options
        * @param {ApiHelper} options.apiHelper
@@ -1702,6 +1826,20 @@ from notebook.conf import ENABLE_QUERY_BUILDER, ENABLE_QUERY_SCHEDULING, get_ord
                 }));
               }
 
+              if (appConfig['browser'] && appConfig['browser']['interpreter_names'].indexOf('adls') !== -1) {
+                panels.push(new AssistInnerPanel({
+                  panelData: new AssistAdlsPanel({
+                    apiHelper: self.apiHelper
+                  }),
+                  apiHelper: self.apiHelper,
+                  name: '${ _("ADLS") }',
+                  type: 'adls',
+                  icon: 'fa-windows',
+                  iconSvg: '#hi-adls',
+                  minHeight: 50
+                }));
+              }
+
               if (appConfig['browser'] && appConfig['browser']['interpreter_names'].indexOf('indexes') != -1) {
                 panels.push(new AssistInnerPanel({
                   panelData: new AssistCollectionsPanel({
@@ -1737,6 +1875,7 @@ from notebook.conf import ENABLE_QUERY_BUILDER, ENABLE_QUERY_SCHEDULING, get_ord
                 name: '${ _("Documents") }',
                 type: 'documents',
                 icon: 'fa-files-o',
+                iconSvg: '#hi-documents',
                 minHeight: 50,
                 rightAlignIcon: true,
                 visible: params.visibleAssistPanels && params.visibleAssistPanels.indexOf('documents') !== -1

+ 3 - 0
desktop/libs/aws/src/aws/s3/s3fs.py

@@ -223,6 +223,9 @@ class S3FileSystem(object):
   def normpath(path):
     return normpath(path)
 
+  def netnormpath(self, path):
+    return normpath(path)
+
   @staticmethod
   def parent_path(path):
     parent_dir = S3FileSystem._append_separator(path)

+ 34 - 0
desktop/libs/azure/Makefile

@@ -0,0 +1,34 @@
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+ifeq ($(ROOT),)
+  $(error "Error: Expect the environment variable $$ROOT to point to the Desktop installation")
+endif
+
+include $(ROOT)/Makefile.sdk
+
+default::
+	@echo '  env-install    : Install into virtual-env'
+
+#
+# env-install
+#   Install app into the virtual environment.
+#
+.PHONY: env-install
+env-install: compile ext-env-install
+	@echo '--- Installing $(APP_NAME) into virtual-env'
+	@$(ENV_PYTHON) setup.py develop -N -q

+ 1 - 0
desktop/libs/azure/babel.cfg

@@ -0,0 +1 @@
+[python: src/azure/**.py]

+ 1 - 0
desktop/libs/azure/hueversion.py

@@ -0,0 +1 @@
+../../VERSION

+ 29 - 0
desktop/libs/azure/setup.py

@@ -0,0 +1,29 @@
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from setuptools import setup, find_packages
+from hueversion import VERSION
+
+setup(
+  name='azure',
+  version=VERSION,
+  url='http://github.com/cloudera/hue',
+  description='Azure Libraries',
+  packages=find_packages('src'),
+  package_dir={'': 'src'},
+  install_requires=['setuptools', 'desktop'],
+  entry_points={'desktop.sdk.lib': 'azure=azure'}
+)

+ 15 - 0
desktop/libs/azure/src/azure/__init__.py

@@ -0,0 +1,15 @@
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 65 - 0
desktop/libs/azure/src/azure/active_directory.py

@@ -0,0 +1,65 @@
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import
+
+import logging
+
+from time import time
+from azure.conf import get_default_client_id, get_default_authentication_code, get_default_refresh_url
+from desktop.lib.rest import http_client, resource
+
+LOG = logging.getLogger(__name__)
+
+class ActiveDirectory(object):
+  def __init__(self, url=None, client_id=None, client_secret=None):
+    self._client_id = client_id
+    self._client_secret = client_secret
+    self._url = url
+
+    self._client = http_client.HttpClient(url, logger=LOG)
+    self._root = resource.Resource(self._client)
+    self._token = None
+
+  def get_token(self):
+    is_token_expired = self._token is None or time() >= self._token["expires_on"]
+    if is_token_expired:
+      LOG.debug("Authenticating to Azure Active Directory: %s" % self._url)
+      data = {
+        "grant_type" : "client_credentials",
+        "resource" : "https://management.core.windows.net/",
+        "client_id" : self._access_key_id,
+        "client_secret" : self._secret_access_key
+      }
+      self._token = self._root.post("/", data=data);
+      self._token["expires_on"] = int(self._token["expires_on"])
+
+    return self._token["token_type"] + " " + self._token["access_token"]
+
+  @classmethod
+  def from_config(cls, conf):
+    client_id = get_default_client_id()
+    client_secret = get_default_authentication_code()
+
+    if None in (client_id, client_secret):
+      raise ValueError('Cannot create an Azure client: credentials are not configured')
+
+    url = get_default_refresh_url()
+
+    return cls(
+      url,
+      client_id=client_id,
+      client_secret=client_secret
+    )
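
A minimal usage sketch of the token cache implemented above; the tenant and credential values are placeholders, not part of this commit:

  from azure.active_directory import ActiveDirectory

  ad = ActiveDirectory(
    url='https://login.microsoftonline.com/<tenant_id>/oauth2/token',
    client_id='<client_id>',
    client_secret='<client_secret>'
  )
  header = ad.get_token()  # first call POSTs the client credentials to the refresh URL and caches the token
  header = ad.get_token()  # later calls reuse the cache until time() passes "expires_on"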

+ 15 - 0
desktop/libs/azure/src/azure/adls/__init__.py

@@ -0,0 +1,15 @@
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 86 - 0
desktop/libs/azure/src/azure/adls/webhdfs.py

@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Interfaces for ADLS via HttpFs/WebHDFS
+"""
+import logging
+import threading
+
+from hadoop.fs.webhdfs import WebHdfs as HadoopWebHdfs
+from hadoop.fs.exceptions import WebHdfsException
+from hadoop.hdfs_site import get_umask_mode
+from desktop.lib.rest import http_client, resource
+from azure.conf import get_default_adls_url, get_default_adls_fs
+from urlparse import urlparse
+
+LOG = logging.getLogger(__name__)
+
+
+class WebHdfs(HadoopWebHdfs):
+  def __init__(self, url,
+               fs_defaultfs,
+               logical_name=None,
+               hdfs_superuser=None,
+               security_enabled=False,
+               ssl_cert_ca_verify=True,
+               temp_dir="/tmp",
+               umask=01022,
+               hdfs_supergroup=None,
+               auth_provider=None):
+    self._url = url
+    self._superuser = hdfs_superuser
+    self._security_enabled = security_enabled
+    self._ssl_cert_ca_verify = ssl_cert_ca_verify
+    self._temp_dir = temp_dir
+    self._umask = umask
+    self._fs_defaultfs = fs_defaultfs
+    self._logical_name = logical_name
+    self._supergroup = hdfs_supergroup
+    self._auth_provider = auth_provider
+    split = urlparse(fs_defaultfs)
+    self._scheme = split.scheme
+    self._netloc = split.netloc
+    self._is_remote = True
+    self._has_trash_support = False
+
+    self._client = http_client.HttpClient(url, exc_class=WebHdfsException, logger=LOG)
+    self._root = resource.Resource(self._client)
+
+    # To store user info
+    self._thread_local = threading.local()
+
+    LOG.debug("Initializing Azure ADLS WebHdfs: %s (security: %s, superuser: %s)" % (self._url, self._security_enabled, self._superuser))
+
+  @classmethod
+  def from_config(cls, hdfs_config, auth_provider):
+    fs_defaultfs = get_default_adls_fs()
+    url = get_default_adls_url()
+    return cls(url=url,
+               fs_defaultfs=fs_defaultfs,
+               logical_name=None,
+               security_enabled=False,
+               ssl_cert_ca_verify=False,
+               temp_dir=None,
+               umask=get_umask_mode(),
+               hdfs_supergroup=None,
+               auth_provider=auth_provider)
+
+  def _getheaders(self):
+    return {
+      "Authorization": self._auth_provider.get_token(),
+    }
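
The only behavioral difference from the plain HDFS client is that _getheaders() is non-empty, so every REST call inherited from hadoop.fs.webhdfs carries the OAuth token. A hedged sketch, assuming a configured 'default' ADLS cluster:

  from azure.client import get_client

  fs = get_client('default')   # the WebHdfs subclass built by from_config() above
  fs.setuser('hue')            # impersonation params (user.name/doas) are still sent
  print fs.listdir_stats('/')  # GET <webhdfs_url>/?op=LISTSTATUS with an Authorization header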

+ 56 - 0
desktop/libs/azure/src/azure/client.py

@@ -0,0 +1,56 @@
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import
+
+import logging
+import os
+
+from azure import conf
+from azure.adls.webhdfs import WebHdfs
+from azure.active_directory import ActiveDirectory
+
+LOG = logging.getLogger(__name__)
+
+CLIENT_CACHE = None
+
+def get_client(identifier='default'):
+  global CLIENT_CACHE
+  _init_clients()
+  if identifier not in CLIENT_CACHE["adls"]:
+    raise ValueError('Unknown azure client: %s, check your configuration' % identifier)
+  return CLIENT_CACHE["adls"][identifier]
+
+def _init_clients():
+  global CLIENT_CACHE
+  if CLIENT_CACHE is not None:
+    return
+  CLIENT_CACHE = {}
+  CLIENT_CACHE["azure"] = {}
+  CLIENT_CACHE["adls"] = {}
+  for identifier in conf.AZURE_ACCOUNTS.keys():
+    CLIENT_CACHE["azure"][identifier] = _make_azure_client(identifier)
+
+  for identifier in conf.ADLS_CLUSTERS.keys():
+    CLIENT_CACHE["adls"][identifier] = _make_adls_client(identifier)
+
+def _make_adls_client(identifier):
+  client_conf = conf.ADLS_CLUSTERS[identifier]
+  azure_client = CLIENT_CACHE["azure"][identifier]
+  return WebHdfs.from_config(client_conf, azure_client)
+
+def _make_azure_client(identifier):
+  client_conf = conf.AZURE_ACCOUNTS[identifier]
+  return ActiveDirectory.from_config(client_conf)
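
_init_clients() builds two caches keyed by the same identifier: an ActiveDirectory client per azure_accounts entry, and a WebHdfs client per adls_clusters entry that reuses the matching ActiveDirectory instance as its auth_provider. A short sketch of the lookup behavior:

  from azure import client

  client.get_client('default')  # returns the cached ADLS WebHdfs instance
  client.get_client('missing')  # raises ValueError('Unknown azure client: missing, check your configuration')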

+ 112 - 0
desktop/libs/azure/src/azure/conf.py

@@ -0,0 +1,112 @@
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import
+
+import logging
+import re
+import azure
+
+from django.utils.translation import ugettext_lazy as _, ugettext as _t
+from desktop.lib.conf import Config, UnspecifiedConfigSection, ConfigSection, coerce_bool, coerce_password_from_script
+from hadoop.core_site import get_adls_client_id, get_adls_authentication_code, get_adls_refresh_url, get_adls_grant_type
+
+
+LOG = logging.getLogger(__name__)
+
+
+def get_default_client_id():
+  """
+  Attempt to get the Azure client ID from hue.ini, else core-site, else None
+  """
+  client_id = AZURE_ACCOUNTS['default'].CLIENT_ID.get()
+  return client_id or get_adls_client_id()
+
+
+def get_default_authentication_code():
+  """
+  Attempt to get the Azure client secret from hue.ini, else core-site, else None
+  """
+  client_secret = AZURE_ACCOUNTS['default'].CLIENT_SECRET.get()
+  return client_secret or get_adls_authentication_code()
+
+def get_default_refresh_url():
+  refresh_url = AZURE_ACCOUNTS['default'].REFRESH_URL.get()
+  return refresh_url or get_adls_refresh_url()
+
+def get_default_grant_type():
+  grant_type = AZURE_ACCOUNTS['default'].GRANT_TYPE.get()
+  return grant_type or get_adls_grant_type()
+
+def get_default_region():
+  return ""
+
+def get_default_adls_url():
+  return ADLS_CLUSTERS['default'].WEBHDFS_URL.get()
+
+def get_default_adls_fs():
+  return ADLS_CLUSTERS['default'].FS_DEFAULTFS.get()
+
+ADLS_CLUSTERS = UnspecifiedConfigSection(
+  "adls_clusters",
+  help="One entry for each ADLS cluster",
+  each=ConfigSection(
+    help="Information about a single ADLS cluster",
+    members=dict(
+      FS_DEFAULTFS=Config("fs_defaultfs", help="adl://<account_name>.azuredatalakestore.net", type=str, default=None),
+      WEBHDFS_URL=Config("webhdfs_url",
+                         help="https://<account_name>.azuredatalakestore.net/webhdfs/v1",
+                         type=str, default=None),
+    )
+  )
+)
+
+AZURE_ACCOUNTS = UnspecifiedConfigSection(
+  "azure_accounts",
+  help="One entry for each Azure account",
+  each=ConfigSection(
+    help="Information about a single azure account",
+    members=dict(
+      CLIENT_ID=Config("client_id", help="", default=None),
+      CLIENT_SECRET=Config("client_secret", help="", default=None),
+      REFRESH_URL=Config("refresh_url",help="https://login.microsoftonline.com/<tenant_id>/oauth2/token", default=None),
+      GRANT_TYPE=Config("grant_type",
+                         help="",
+                         type=str, default="client_credentials")
+    )
+  )
+)
+
+
+def is_adls_enabled():
+  return ('default' in AZURE_ACCOUNTS.keys() and AZURE_ACCOUNTS['default'].get_raw() and AZURE_ACCOUNTS['default'].CLIENT_ID.get() is not None)
+
+def has_adls_access(user):
+  return user.is_authenticated() and user.is_active and (user.is_superuser or user.has_hue_permission(action="adls_access", app="filebrowser"))
+
+def config_validator(user):
+  res = []
+
+  if is_adls_enabled():
+    try:
+      from azure.client import get_client  # local import to avoid a circular dependency with azure.client
+      headers = get_client('default')._getheaders()
+      if not headers.get('Authorization'):
+        raise ValueError('Failed to obtain Azure authorization token')
+    except Exception, e:
+      LOG.exception('Failed to obtain Azure authorization token.')
+      res.append(('azure', _t('Failed to obtain Azure authorization token, check your azure configuration.')))
+
+  return res
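
For reference, a hedged sketch of the hue.ini entries these sections parse; the enclosing [azure] section name and all values are illustrative, only the azure_accounts/adls_clusters keys come from the code above:

  [azure]
    [[azure_accounts]]
      [[[default]]]
        client_id=<client_id>
        client_secret=<client_secret>
        refresh_url=https://login.microsoftonline.com/<tenant_id>/oauth2/token
    [[adls_clusters]]
      [[[default]]]
        fs_defaultfs=adl://<account_name>.azuredatalakestore.net
        webhdfs_url=https://<account_name>.azuredatalakestore.net/webhdfs/v1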

+ 40 - 0
desktop/libs/azure/src/azure/locale/de/LC_MESSAGES/django.po

@@ -0,0 +1,40 @@
+# German translations for Hue.
+# Copyright (C) 2015 Cloudera, Inc
+# This file is distributed under the same license as the Hue project.
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2015.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: Hue VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
+"POT-Creation-Date: 2017-09-22 10:22-0700\n"
+"PO-Revision-Date: 2017-09-22 10:22-0700\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: de <LL@li.org>\n"
+"Plural-Forms: nplurals=2; plural=(n != 1)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.6\n"
+
+#: src/azure/conf.py:94
+msgid "Proxy address to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:100
+msgid "Proxy port to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:106
+msgid "Proxy user to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:112
+msgid "Proxy password to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:137
+msgid "Failed to connect to azure, check your azure credentials."
+msgstr ""
+

+ 40 - 0
desktop/libs/azure/src/azure/locale/en/LC_MESSAGES/django.po

@@ -0,0 +1,40 @@
+# English translations for Hue.
+# Copyright (C) 2015 Cloudera, Inc
+# This file is distributed under the same license as the Hue project.
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2015.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: Hue VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
+"POT-Creation-Date: 2017-09-22 10:22-0700\n"
+"PO-Revision-Date: 2017-09-22 10:22-0700\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: en <LL@li.org>\n"
+"Plural-Forms: nplurals=2; plural=(n != 1)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.6\n"
+
+#: src/azure/conf.py:94
+msgid "Proxy address to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:100
+msgid "Proxy port to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:106
+msgid "Proxy user to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:112
+msgid "Proxy password to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:137
+msgid "Failed to connect to azure, check your azure credentials."
+msgstr ""
+

+ 39 - 0
desktop/libs/azure/src/azure/locale/en_US.pot

@@ -0,0 +1,39 @@
+# Translations template for Hue.
+# Copyright (C) 2017 Cloudera, Inc
+# This file is distributed under the same license as the Hue project.
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2017.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: Hue VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
+"POT-Creation-Date: 2017-09-22 10:22-0700\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.6\n"
+
+#: src/azure/conf.py:94
+msgid "Proxy address to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:100
+msgid "Proxy port to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:106
+msgid "Proxy user to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:112
+msgid "Proxy password to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:137
+msgid "Failed to connect to azure, check your azure credentials."
+msgstr ""
+

+ 40 - 0
desktop/libs/azure/src/azure/locale/es/LC_MESSAGES/django.po

@@ -0,0 +1,40 @@
+# Spanish translations for Hue.
+# Copyright (C) 2015 Cloudera, Inc
+# This file is distributed under the same license as the Hue project.
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2015.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: Hue VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
+"POT-Creation-Date: 2017-09-22 10:22-0700\n"
+"PO-Revision-Date: 2017-09-22 10:22-0700\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: es <LL@li.org>\n"
+"Plural-Forms: nplurals=2; plural=(n != 1)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.6\n"
+
+#: src/azure/conf.py:94
+msgid "Proxy address to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:100
+msgid "Proxy port to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:106
+msgid "Proxy user to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:112
+msgid "Proxy password to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:137
+msgid "Failed to connect to azure, check your azure credentials."
+msgstr ""
+

+ 40 - 0
desktop/libs/azure/src/azure/locale/fr/LC_MESSAGES/django.po

@@ -0,0 +1,40 @@
+# French translations for Hue.
+# Copyright (C) 2015 Cloudera, Inc
+# This file is distributed under the same license as the Hue project.
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2015.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: Hue VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
+"POT-Creation-Date: 2017-09-22 10:22-0700\n"
+"PO-Revision-Date: 2017-09-22 10:22-0700\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: fr <LL@li.org>\n"
+"Plural-Forms: nplurals=2; plural=(n > 1)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.6\n"
+
+#: src/azure/conf.py:94
+msgid "Proxy address to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:100
+msgid "Proxy port to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:106
+msgid "Proxy user to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:112
+msgid "Proxy password to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:137
+msgid "Failed to connect to azure, check your azure credentials."
+msgstr ""
+

+ 40 - 0
desktop/libs/azure/src/azure/locale/ja/LC_MESSAGES/django.po

@@ -0,0 +1,40 @@
+# Japanese translations for Hue.
+# Copyright (C) 2015 Cloudera, Inc
+# This file is distributed under the same license as the Hue project.
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2015.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: Hue VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
+"POT-Creation-Date: 2017-09-22 10:22-0700\n"
+"PO-Revision-Date: 2017-09-22 10:22-0700\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: ja <LL@li.org>\n"
+"Plural-Forms: nplurals=1; plural=0\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.6\n"
+
+#: src/azure/conf.py:94
+msgid "Proxy address to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:100
+msgid "Proxy port to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:106
+msgid "Proxy user to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:112
+msgid "Proxy password to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:137
+msgid "Failed to connect to azure, check your azure credentials."
+msgstr ""
+

+ 40 - 0
desktop/libs/azure/src/azure/locale/ko/LC_MESSAGES/django.po

@@ -0,0 +1,40 @@
+# Korean translations for Hue.
+# Copyright (C) 2015 Cloudera, Inc
+# This file is distributed under the same license as the Hue project.
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2015.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: Hue VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
+"POT-Creation-Date: 2017-09-22 10:22-0700\n"
+"PO-Revision-Date: 2017-09-22 10:22-0700\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: ko <LL@li.org>\n"
+"Plural-Forms: nplurals=1; plural=0\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.6\n"
+
+#: src/azure/conf.py:94
+msgid "Proxy address to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:100
+msgid "Proxy port to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:106
+msgid "Proxy user to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:112
+msgid "Proxy password to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:137
+msgid "Failed to connect to azure, check your azure credentials."
+msgstr ""
+

+ 40 - 0
desktop/libs/azure/src/azure/locale/pt/LC_MESSAGES/django.po

@@ -0,0 +1,40 @@
+# Portuguese translations for Hue.
+# Copyright (C) 2015 Cloudera, Inc
+# This file is distributed under the same license as the Hue project.
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2015.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: Hue VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
+"POT-Creation-Date: 2017-09-22 10:22-0700\n"
+"PO-Revision-Date: 2017-09-22 10:22-0700\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: pt <LL@li.org>\n"
+"Plural-Forms: nplurals=2; plural=(n != 1)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.6\n"
+
+#: src/azure/conf.py:94
+msgid "Proxy address to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:100
+msgid "Proxy port to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:106
+msgid "Proxy user to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:112
+msgid "Proxy password to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:137
+msgid "Failed to connect to azure, check your azure credentials."
+msgstr ""
+

+ 40 - 0
desktop/libs/azure/src/azure/locale/pt_BR/LC_MESSAGES/django.po

@@ -0,0 +1,40 @@
+# Portuguese (Brazil) translations for Hue.
+# Copyright (C) 2015 Cloudera, Inc
+# This file is distributed under the same license as the Hue project.
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2015.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: Hue VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
+"POT-Creation-Date: 2017-09-22 10:22-0700\n"
+"PO-Revision-Date: 2017-09-22 10:22-0700\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: pt_BR <LL@li.org>\n"
+"Plural-Forms: nplurals=2; plural=(n > 1)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.6\n"
+
+#: src/azure/conf.py:94
+msgid "Proxy address to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:100
+msgid "Proxy port to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:106
+msgid "Proxy user to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:112
+msgid "Proxy password to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:137
+msgid "Failed to connect to azure, check your azure credentials."
+msgstr ""
+

+ 40 - 0
desktop/libs/azure/src/azure/locale/zh_CN/LC_MESSAGES/django.po

@@ -0,0 +1,40 @@
+# Chinese (China) translations for Hue.
+# Copyright (C) 2015 Cloudera, Inc
+# This file is distributed under the same license as the Hue project.
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2015.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: Hue VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
+"POT-Creation-Date: 2017-09-22 10:22-0700\n"
+"PO-Revision-Date: 2017-09-22 10:22-0700\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: zh_CN <LL@li.org>\n"
+"Plural-Forms: nplurals=1; plural=0\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.6\n"
+
+#: src/azure/conf.py:94
+msgid "Proxy address to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:100
+msgid "Proxy port to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:106
+msgid "Proxy user to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:112
+msgid "Proxy password to use for the S3 connection."
+msgstr ""
+
+#: src/azure/conf.py:137
+msgid "Failed to connect to azure, check your azure credentials."
+msgstr ""
+

+ 34 - 0
desktop/libs/hadoop/src/hadoop/core_site.py

@@ -33,6 +33,12 @@ _CNF_TRASH_INTERVAL = 'fs.trash.interval'
 _CNF_S3A_ACCESS_KEY = 'fs.s3a.access.key'
 _CNF_S3A_SECRET_KEY = 'fs.s3a.secret.key'
 
+_CNF_ADLS_CLIENT_ID = 'dfs.adls.oauth2.client.id'
+_CNF_ADLS_AUTHENTICATION_CODE = 'dfs.adls.oauth2.credential'
+_CNF_ADLS_REFRESH_URL = 'dfs.adls.oauth2.refresh.url'
+_CNF_ADLS_GRANT_TYPE = 'dfs.adls.oauth2.access.token.provider.type'
+
 def reset():
   """Reset the cached conf"""
   global _CORE_SITE_DICT
@@ -91,3 +97,31 @@ def get_s3a_secret_key():
   https://hadoop.apache.org/docs/stable/hadoop-aws/tools/hadoop-aws/index.html
   """
   return get_conf().get(_CNF_S3A_SECRET_KEY)
+
+def get_adls_client_id():
+  """
+  Get the ADLS OAuth2 client ID
+  https://hadoop.apache.org/docs/stable/hadoop-azure-datalake/index.html
+  """
+  return get_conf().get(_CNF_ADLS_CLIENT_ID)
+
+def get_adls_authentication_code():
+  """
+  Get the ADLS OAuth2 credential (client secret)
+  https://hadoop.apache.org/docs/stable/hadoop-azure-datalake/index.html
+  """
+  return get_conf().get(_CNF_ADLS_AUTHENTICATION_CODE)
+
+def get_adls_refresh_url():
+  """
+  Get the ADLS OAuth2 refresh URL
+  https://hadoop.apache.org/docs/stable/hadoop-azure-datalake/index.html
+  """
+  return get_conf().get(_CNF_ADLS_REFRESH_URL)
+
+def get_adls_grant_type():
+  """
+  Get the ADLS access token provider type
+  https://hadoop.apache.org/docs/stable/hadoop-azure-datalake/index.html
+  """
+  return get_conf().get(_CNF_ADLS_GRANT_TYPE)
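
Equivalently, a hedged core-site.xml sketch for the fallback path read by the getters above; the property names are the constants from the first hunk, the values are placeholders:

  <property>
    <name>dfs.adls.oauth2.client.id</name>
    <value><client_id></value>
  </property>
  <property>
    <name>dfs.adls.oauth2.credential</name>
    <value><client_secret></value>
  </property>
  <property>
    <name>dfs.adls.oauth2.refresh.url</name>
    <value>https://login.microsoftonline.com/<tenant_id>/oauth2/token</value>
  </property>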

+ 5 - 1
desktop/libs/hadoop/src/hadoop/fs/upload.py

@@ -32,7 +32,10 @@ import time
 from django.core.files.uploadhandler import FileUploadHandler, StopFutureHandlers, StopUpload, UploadFileException, SkipFile
 from django.utils.translation import ugettext as _
 
+
 import hadoop.cluster
+from desktop.lib import fsmanager
+from urlparse import urlparse
 from hadoop.conf import UPLOAD_CHUNK_SIZE
 from hadoop.fs.exceptions import WebHdfsException
 
@@ -139,7 +142,6 @@ class HDFSfileUploadHandler(FileUploadHandler):
     self._activated = False
     self._destination = request.GET.get('dest', None) # GET param avoids infinite looping
     self.request = request
-    # Need to directly modify FileUploadHandler.chunk_size
     FileUploadHandler.chunk_size = UPLOAD_CHUNK_SIZE.get()
 
     LOG.debug("Chunk size = %d" % FileUploadHandler.chunk_size)
@@ -149,6 +151,8 @@ class HDFSfileUploadHandler(FileUploadHandler):
     if field_name.upper().startswith('HDFS'):
       LOG.info('Using HDFSfileUploadHandler to handle file upload.')
       try:
+        fs_ref = self.request.REQUEST.get('fs', 'default')
+        self.request.fs = fsmanager.get_filesystem(fs_ref)
         self._file = HDFStemporaryUploadedFile(self.request, file_name, self._destination)
         LOG.debug('Upload attempt to %s' % (self._file.get_temp_path(),))
         self._activated = True
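
The new lines resolve the destination filesystem from the request instead of assuming HDFS. A hedged sketch of the lookup; the 'adl' identifier is an assumption about how fsmanager registers ADLS, only 'default' appears in this hunk:

  from desktop.lib import fsmanager

  fs = fsmanager.get_filesystem('default')  # HDFS; an ADLS upload would pass e.g. ?fs=adl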

+ 123 - 61
desktop/libs/hadoop/src/hadoop/fs/webhdfs.py

@@ -26,10 +26,11 @@ import stat
 import threading
 import time
 
+from urlparse import urlparse
 from django.utils.encoding import smart_str
 from django.utils.translation import ugettext as _
 from desktop.lib.rest import http_client, resource
-from hadoop.fs import normpath, SEEK_SET, SEEK_CUR, SEEK_END
+from hadoop.fs import normpath as fs_normpath, SEEK_SET, SEEK_CUR, SEEK_END
 from hadoop.fs.hadoopfs import Hdfs
 from hadoop.fs.exceptions import WebHdfsException
 from hadoop.fs.webhdfs_types import WebHdfsStat, WebHdfsContentSummary
@@ -73,6 +74,10 @@ class WebHdfs(Hdfs):
     self._fs_defaultfs = fs_defaultfs
     self._logical_name = logical_name
     self._supergroup = hdfs_supergroup
+    self._scheme = ""
+    self._netloc = "";
+    self._is_remote = False
+    self._has_trash_support = True
 
     self._client = self._make_client(url, security_enabled, ssl_cert_ca_verify)
     self._root = resource.Resource(self._client)
@@ -169,7 +174,9 @@ class WebHdfs(Hdfs):
         path = self.get_home_dir()
       params = self._getparams()
       params['op'] = 'GETTRASHROOT'
-      json = self._root.get(path, params)
+      headers = self._getheaders()
+
+      json = self._root.get(path, params, headers)
       trash_path = json['Path']
     except WebHdfsException, e:
       exceptions = ['IllegalArgumentException', 'UnsupportedOperationException']
@@ -188,24 +195,58 @@ class WebHdfs(Hdfs):
       "doas" : self.user
     }
 
+  def _getheaders(self):
+    return None
+
   def setuser(self, user):
     """Set a new user. Return the current user."""
     curr = self.user
     self._thread_local.user = user
     return curr
 
+  def is_absolute(self, path):
+    return path.startswith(self._scheme) if self._scheme else path.startswith('/')
+
+  def strip_normpath(self, path):
+    split = urlparse(path)
+    path = split._replace(scheme="", netloc="").geturl()
+    return Hdfs.normpath(path)
+
+  def normpath(self, path):
+    """
+    Return the normalized path, keeping any leading scheme prefix intact
+    """
+    path = fs_normpath(path)
+    # fs_normpath can leave a scheme-only URL with an empty path; restore "/" so the result stays valid
+    split = urlparse(path)
+    if not split.path:
+      path = split._replace(path="/").geturl()
+    return path
+
+  def netnormpath(self, path):
+    path = self.normpath(path)
+    if not self._is_remote:
+      return path
+
+    split = urlparse(path)
+    if not split.netloc:
+      path = split._replace(netloc=self._netloc).geturl()
+    return path
+
   def listdir_stats(self, path, glob=None):
     """
     listdir_stats(path, glob=None) -> [ WebHdfsStat ]
 
     Get directory listing with stats.
     """
-    path = Hdfs.normpath(path)
+    path = self.strip_normpath(path)
     params = self._getparams()
     if glob is not None:
       params['filter'] = glob
     params['op'] = 'LISTSTATUS'
-    json = self._root.get(path, params)
+    headers = self._getheaders()
+    json = self._root.get(path, params, headers)
     filestatus_list = json['FileStatuses']['FileStatus']
     return [ WebHdfsStat(st, path) for st in filestatus_list ]
 
@@ -222,20 +263,22 @@ class WebHdfs(Hdfs):
     """
     get_content_summary(path) -> WebHdfsContentSummary
     """
-    path = Hdfs.normpath(path)
+    path = self.strip_normpath(path)
     params = self._getparams()
     params['op'] = 'GETCONTENTSUMMARY'
-    json = self._root.get(path, params)
+    headers = self._getheaders()
+    json = self._root.get(path, params, headers)
     return WebHdfsContentSummary(json['ContentSummary'])
 
 
   def _stats(self, path):
     """This version of stats returns None if the entry is not found"""
-    path = Hdfs.normpath(path)
+    path = self.strip_normpath(path)
     params = self._getparams()
     params['op'] = 'GETFILESTATUS'
+    headers = self._getheaders()
     try:
-      json = self._root.get(path, params)
+      json = self._root.get(path, params, headers)
       return WebHdfsStat(json['FileStatus'], path)
     except WebHdfsException, ex:
       if ex.server_exc == 'FileNotFoundException' or ex.code == 404:
@@ -267,7 +310,7 @@ class WebHdfs(Hdfs):
     return not sb.isDir
 
   def isroot(self, path):
-    return path == '/'
+    return urlparse(path).path == '/'
 
   def _ensure_current_trash_directory(self, path):
     """Create trash directory for a user if it doesn't exist."""
@@ -284,6 +327,7 @@ class WebHdfs(Hdfs):
 
     Trash must be enabled for this to work.
     """
+    path = self.strip_normpath(path)
     if not self.exists(path):
       raise IOError(errno.ENOENT, _("File %s not found") % path)
 
@@ -310,11 +354,12 @@ class WebHdfs(Hdfs):
 
     Delete a file or directory.
     """
-    path = Hdfs.normpath(path)
+    path = self.strip_normpath(path)
     params = self._getparams()
     params['op'] = 'DELETE'
     params['recursive'] = recursive and 'true' or 'false'
-    result = self._root.delete(path, params)
+    headers = self._getheaders()
+    result = self._root.delete(path, params, headers)
     # This part of the API is nonsense.
     # The lack of exception should indicate success.
     if not result['boolean']:
@@ -322,7 +367,7 @@ class WebHdfs(Hdfs):
 
   def remove(self, path, skip_trash=False):
     """Delete a file."""
-    if skip_trash:
+    if skip_trash or self._has_trash_support is False:
       self._delete(path, recursive=False)
     else:
       self._trash(path, recursive=False)
@@ -333,7 +378,7 @@ class WebHdfs(Hdfs):
 
   def rmtree(self, path, skip_trash=False):
     """Delete a tree recursively."""
-    if skip_trash:
+    if skip_trash or self._has_trash_support is False:
       self._delete(path, recursive=True)
     else:
       self._trash(path, recursive=True)
@@ -379,29 +424,30 @@ class WebHdfs(Hdfs):
 
     Creates a directory and any parent directory if necessary.
     """
-    path = Hdfs.normpath(path)
+    path = self.strip_normpath(path)
     params = self._getparams()
     params['op'] = 'MKDIRS'
-
+    headers = self._getheaders()
     if mode is None:
       mode = self.getDefaultDirPerms()
     params['permission'] = safe_octal(mode)
 
-    success = self._root.put(path, params)
+    success = self._root.put(path, params, headers=headers)
     if not success:
       raise IOError(_("Mkdir failed: %s") % path)
 
   def rename(self, old, new):
     """rename(old, new)"""
-    old = Hdfs.normpath(old)
-    if not new.startswith('/'):
+    old = self.strip_normpath(old)
+    if not self.is_absolute(new):
       new = Hdfs.join(Hdfs.dirname(old), new)
-    new = Hdfs.normpath(new)
+    new = self.strip_normpath(new)
     params = self._getparams()
     params['op'] = 'RENAME'
     # Encode `new' because it's in the params
     params['destination'] = smart_str(new)
-    result = self._root.put(old, params)
+    headers = self._getheaders()
+    result = self._root.put(old, params, headers=headers)
     if not result['boolean']:
       raise IOError(_("Rename failed: %s -> %s") %
                     (str(smart_str(old)), str(smart_str(new))))
@@ -423,23 +469,25 @@ class WebHdfs(Hdfs):
     params = self._getparams()
     params['op'] = 'SETREPLICATION'
     params['replication'] = repl_factor
-    result = self._root.put(filename, params)
+    headers = self._getheaders()
+    result = self._root.put(filename, params, headers=headers)
     return result['boolean']
 
   def chown(self, path, user=None, group=None, recursive=False):
     """chown(path, user=None, group=None, recursive=False)"""
-    path = Hdfs.normpath(path)
+    path = self.strip_normpath(path)
     params = self._getparams()
     params['op'] = 'SETOWNER'
     if user is not None:
       params['owner'] = user
     if group is not None:
       params['group'] = group
+    headers = self._getheaders()
     if recursive:
       for xpath in self.listdir_recursive(path):
-        self._root.put(xpath, params)
+        self._root.put(xpath, params, headers=headers)
     else:
-      self._root.put(path, params)
+      self._root.put(path, params, headers=headers)
 
 
   def chmod(self, path, mode, recursive=False):
@@ -448,23 +496,27 @@ class WebHdfs(Hdfs):
 
     `mode' should be an octal integer or string.
     """
-    path = Hdfs.normpath(path)
+    path = self.strip_normpath(path)
     params = self._getparams()
     params['op'] = 'SETPERMISSION'
     params['permission'] = safe_octal(mode)
+    headers = self._getheaders()
     if recursive:
       for xpath in self.listdir_recursive(path):
-        self._root.put(xpath, params)
+        self._root.put(xpath, params, headers=headers)
     else:
-      self._root.put(path, params)
+      self._root.put(path, params, headers=headers)
 
 
   def get_home_dir(self):
     """get_home_dir() -> Home directory for the current user"""
     params = self._getparams()
     params['op'] = 'GETHOMEDIRECTORY'
-    res = self._root.get(params=params)
-    return res['Path']
+    headers = self._getheaders()
+    res = self._root.get(params=params, headers=headers)
+    for key, value in res.iteritems():
+      if key.lower() == "path":
+        return self.normpath(value)
 
 
   def read(self, path, offset, length, bufsize=None):
@@ -473,15 +525,16 @@ class WebHdfs(Hdfs):
 
     Read data from a file.
     """
-    path = Hdfs.normpath(path)
+    path = self.strip_normpath(path)
     params = self._getparams()
     params['op'] = 'OPEN'
     params['offset'] = long(offset)
     params['length'] = long(length)
     if bufsize is not None:
       params['bufsize'] = bufsize
+    headers = self._getheaders()
     try:
-      return self._root.get(path, params)
+      return self._root.get(path, params, headers)
     except WebHdfsException, ex:
       if "out of the range" in ex.message:
         return ""
@@ -500,11 +553,11 @@ class WebHdfs(Hdfs):
 
 
   def getDefaultFilePerms(self):
-    return 0666 & (01777 ^ self.umask)
+    return 0666 & (01777 ^ self._umask)
 
 
   def getDefaultDirPerms(self):
-    return 01777 & (01777 ^ self.umask)
+    return 01777 & (01777 ^ self._umask)
 
 
   def create(self, path, overwrite=False, blocksize=None, replication=None, permission=None, data=None):
@@ -514,7 +567,7 @@ class WebHdfs(Hdfs):
     Creates a file with the specified parameters.
     `permission' should be an octal integer or string.
     """
-    path = Hdfs.normpath(path)
+    path = self.strip_normpath(path)
     params = self._getparams()
     params['op'] = 'CREATE'
     params['overwrite'] = overwrite and 'true' or 'false'
@@ -525,8 +578,8 @@ class WebHdfs(Hdfs):
     if permission is None:
       permission = self.getDefaultFilePerms()
     params['permission'] = safe_octal(permission)
-
-    self._invoke_with_redirect('PUT', path, params, data)
+    headers = self._getheaders()
+    self._invoke_with_redirect('PUT', path, params, data, headers)
 
 
   def append(self, path, data):
@@ -535,66 +588,73 @@ class WebHdfs(Hdfs):
 
     Append data to a given file.
     """
-    path = Hdfs.normpath(path)
+    path = self.strip_normpath(path)
     params = self._getparams()
     params['op'] = 'APPEND'
-    self._invoke_with_redirect('POST', path, params, data)
+    headers = self._getheaders()
+    self._invoke_with_redirect('POST', path, params, data, headers)
 
 
   # e.g. ACLSPEC = user:joe:rwx,user::rw-
   def modify_acl_entries(self, path, aclspec):
-    path = Hdfs.normpath(path)
+    path = self.strip_normpath(path)
     params = self._getparams()
     params['op'] = 'MODIFYACLENTRIES'
     params['aclspec'] = aclspec
-    return self._root.put(path, params)
+    headers = self._getheaders()
+    return self._root.put(path, params, headers=headers)
 
 
   def remove_acl_entries(self, path, aclspec):
-      path = Hdfs.normpath(path)
+      path = self.strip_normpath(path)
       params = self._getparams()
       params['op'] = 'REMOVEACLENTRIES'
       params['aclspec'] = aclspec
-      return self._root.put(path, params)
+      headers = self._getheaders()
+      return self._root.put(path, params, headers=headers)
 
 
   def remove_default_acl(self, path):
-      path = Hdfs.normpath(path)
+      path = self.strip_normpath(path)
       params = self._getparams()
       params['op'] = 'REMOVEDEFAULTACL'
-      return self._root.put(path, params)
+      headers = self._getheaders()
+      return self._root.put(path, params, headers=headers)
 
 
   def remove_acl(self, path):
-      path = Hdfs.normpath(path)
+      path = self.strip_normpath(path)
       params = self._getparams()
       params['op'] = 'REMOVEACL'
-      return self._root.put(path, params)
+      headers = self._getheaders()
+      return self._root.put(path, params, headers=headers)
 
 
   def set_acl(self, path, aclspec):
-      path = Hdfs.normpath(path)
+      path = self.strip_normpath(path)
       params = self._getparams()
       params['op'] = 'SETACL'
       params['aclspec'] = aclspec
-      return self._root.put(path, params)
+      headers = self._getheaders()
+      return self._root.put(path, params, headers=headers)
 
 
   def get_acl_status(self, path):
-      path = Hdfs.normpath(path)
+      path = self.strip_normpath(path)
       params = self._getparams()
       params['op'] = 'GETACLSTATUS'
-      return self._root.get(path, params)
+      headers = self._getheaders()
+      return self._root.get(path, params, headers=headers)
 
 
   def check_access(self, path, aclspec='rw-'):
-    path = Hdfs.normpath(path)
+    path = self.strip_normpath(path)
     params = self._getparams()
     params['op'] = 'CHECKACCESS'
     params['fsaction'] = aclspec
-
+    headers = self._getheaders()
     try:
-      return self._root.get(path, params)
+      return self._root.get(path, params, headers)
     except WebHdfsException, ex:
       if ex.code == 500 or ex.code == 400:
         LOG.warn('Failed to check access to path %s, CHECKACCESS operation may not be supported.' % path)
@@ -687,8 +747,8 @@ class WebHdfs(Hdfs):
       sb = self._stats(src)
       dir_mode=oct(stat.S_IMODE(sb.mode))
 
-    src = self.abspath(src)
-    dest = self.abspath(dest)
+    src = self.strip_normpath(src)
+    dest = self.strip_normpath(dest)
 
     if not self.exists(src):
       raise IOError(errno.ENOENT, _("File not found: %s") % src)
@@ -732,7 +792,7 @@ class WebHdfs(Hdfs):
     return posixpath.join(self.fs_defaultfs, path.lstrip('/'))
 
 
-  def _invoke_with_redirect(self, method, path, params=None, data=None):
+  def _invoke_with_redirect(self, method, path, params=None, data=None, headers=None):
     """
     Issue a request, and expect a redirect, and then submit the data to
     the redirected location. This is used for create, write, etc.
@@ -742,7 +802,7 @@ class WebHdfs(Hdfs):
     next_url = None
     try:
       # Do not pass data in the first leg.
-      self._root.invoke(method, path, params)
+      self._root.invoke(method, path, params, headers=headers)
     except WebHdfsException, ex:
       # This is expected. We get a 307 redirect.
       # The following call may throw.
@@ -756,8 +816,9 @@ class WebHdfs(Hdfs):
 
     # Make sure to reuse the session in order to preserve the Kerberos cookies.
     client._session = self._client._session
-
-    headers = {'Content-Type': 'application/octet-stream'}
+    if headers is None:
+      headers = {}
+    headers["Content-Type"] = 'application/octet-stream'
     return resource.Resource(client).invoke(method, data=data, headers=headers)
 
 
@@ -786,7 +847,8 @@ class WebHdfs(Hdfs):
     params = self._getparams()
     params['op'] = 'GETDELEGATIONTOKEN'
     params['renewer'] = renewer
-    res = self._root.get(params=params)
+    headers = self._getheaders()
+    res = self._root.get(params=params, headers=headers)
     return res['Token']['urlString']
 
 
@@ -832,7 +894,7 @@ class File(object):
   """
   def __init__(self, fs, path, mode='r'):
     self._fs = fs
-    self._path = normpath(path)
+    self._path = fs_normpath(path)
     self._pos = 0
     self._mode = mode
 

+ 2 - 0
desktop/libs/indexer/src/indexer/api3.py

@@ -169,8 +169,10 @@ def guess_field_types(request):
 @api_error_handler
 def importer_submit(request):
   source = json.loads(request.POST.get('source', '{}'))
+  if source.get('path'):
+    source['path'] = request.fs.netnormpath(source['path'])
   outputFormat = json.loads(request.POST.get('destination', '{}'))['outputFormat']
   destination = json.loads(request.POST.get('destination', '{}'))
+  if destination.get('nonDefaultLocation'):
+    destination['nonDefaultLocation'] = request.fs.netnormpath(destination['nonDefaultLocation'])
   destination['ouputFormat'] = outputFormat # Workaround a very weird bug
   start_time = json.loads(request.POST.get('start_time', '-1'))
 

+ 2 - 0
desktop/libs/notebook/src/notebook/api.py

@@ -632,6 +632,7 @@ def export_result(request):
   api = get_api(request, snippet)
 
   if data_format == 'hdfs-file': # Blocking operation, like downloading
+    destination = request.fs.netnormpath(destination)
     if request.fs.isdir(destination):
       if notebook.get('name'):
         destination += '/%(name)s.csv' % notebook
@@ -672,6 +673,7 @@ def export_result(request):
       'allowed': True
     }
   elif data_format == 'hdfs-directory':
+    destination = request.fs.netnormpath(destination)
     if is_embedded:
       sql, success_url = api.export_large_data_to_hdfs(notebook, snippet, destination)