
HUE-8728 [jb] Redirect client for file download.

jdesjean 6 years ago
Parent
Current commit
4cb5abd8e8

+ 7 - 0
apps/filebrowser/src/filebrowser/conf.py

@@ -58,3 +58,10 @@ ENABLE_EXTRACT_UPLOADED_ARCHIVE = Config(
   type=bool,
   dynamic_default=is_oozie_enabled
 )
+
+REDIRECT_DOWNLOAD = Config(
+  key="redirect_download",
+  help=_(
+    'Redirect client to WebHdfs or S3 for file download. Note: Turning this on will override notebook/redirect_whitelist for user selected file downloads on WebHdfs & S3.'),
+  type=coerce_bool,
+  default=True)
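
Note (not part of the commit): a minimal sketch of how the new flag is consumed; should_redirect is a hypothetical helper, but the condition itself mirrors the views.py change below.

from filebrowser.conf import REDIRECT_DOWNLOAD

def should_redirect(fh):
  # Redirect only when the flag is on and the open file handle can build a
  # direct URL for the client (the read_url() additions further down).
  return REDIRECT_DOWNLOAD.get() and hasattr(fh, 'read_url')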

+ 10 - 9
apps/filebrowser/src/filebrowser/views.py

@@ -36,7 +36,7 @@ from django.contrib.auth.models import User, Group
 from django.core.paginator import EmptyPage, Paginator, Page, InvalidPage
 from django.urls import reverse
 from django.template.defaultfilters import stringformat, filesizeformat
-from django.http import Http404, StreamingHttpResponse, HttpResponseNotModified, HttpResponseForbidden, HttpResponse
+from django.http import Http404, StreamingHttpResponse, HttpResponseNotModified, HttpResponseForbidden, HttpResponse, HttpResponseRedirect
 from django.views.decorators.http import require_http_methods
 from django.views.static import was_modified_since
 from django.shortcuts import redirect
@@ -64,10 +64,7 @@ from hadoop.fs.hadoopfs import Hdfs
 from hadoop.fs.exceptions import WebHdfsException
 from hadoop.fs.fsutils import do_overwrite_save
 
-from filebrowser.conf import ENABLE_EXTRACT_UPLOADED_ARCHIVE
-from filebrowser.conf import MAX_SNAPPY_DECOMPRESSION_SIZE
-from filebrowser.conf import SHOW_DOWNLOAD_BUTTON
-from filebrowser.conf import SHOW_UPLOAD_BUTTON
+from filebrowser.conf import ENABLE_EXTRACT_UPLOADED_ARCHIVE, MAX_SNAPPY_DECOMPRESSION_SIZE, SHOW_DOWNLOAD_BUTTON, SHOW_UPLOAD_BUTTON, REDIRECT_DOWNLOAD
 from filebrowser.lib.archives import archive_factory
 from filebrowser.lib.rwx import filetype, rwx
 from filebrowser.lib import xxd
@@ -164,10 +161,14 @@ def download(request, path):
         else:
             raise PopupException(_('Failed to download file at path "%s": %s') % (path, e))
 
-    response = StreamingHttpResponse(_file_reader(fh), content_type=content_type)
-    response["Last-Modified"] = http_date(stats['mtime'])
-    response["Content-Length"] = stats['size']
-    response['Content-Disposition'] = request.GET.get('disposition', 'attachment') if _can_inline_display(path) else 'attachment'
+    if REDIRECT_DOWNLOAD.get() and hasattr(fh, 'read_url'):
+      response = HttpResponseRedirect(fh.read_url())
+      setattr(response, 'redirect_override', True)
+    else:
+      response = StreamingHttpResponse(_file_reader(fh), content_type=content_type)
+      response["Last-Modified"] = http_date(stats['mtime'])
+      response["Content-Length"] = stats['size']
+      response['Content-Disposition'] = request.GET.get('disposition', 'attachment') if _can_inline_display(path) else 'attachment'
 
 
     request.audit = {
         'operation': 'DOWNLOAD',
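
For illustration only (hypothetical Django test-client call and path, not part of the commit), the download view now answers in one of two ways:

response = client.get('/filebrowser/download=/user/test/data.csv')
if response.status_code == 302:
  # redirect_download on + WebHDFS/S3 backend: the browser follows Location and
  # pulls the bytes straight from the storage service.
  direct_url = response['Location']
else:
  # Flag off, or a backend without read_url(): Hue streams the file itself.
  data = b''.join(response.streaming_content)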

+ 5 - 0
desktop/conf.dist/hue.ini

@@ -1302,6 +1302,8 @@
   # Flag to enable the extraction of a uploaded archive in HDFS.
   ## enable_extract_uploaded_archive=true
 
+  # Redirect client to WebHdfs or S3 for file download. Note: Turning this on will override notebook/redirect_whitelist for user selected file downloads on WebHdfs & S3.
+  ## redirect_download=true
 
 
 ###########################################################################
 # Settings to configure Pig
@@ -1581,6 +1583,9 @@
       # e.g. Use boto.s3.connection.OrdinaryCallingFormat for https://s3.amazonaws.com/<bucket-name>
       ## calling_format=boto.s3.connection.OrdinaryCallingFormat
 
+      # The time in seconds before a delegate key is expired. Used when filebrowser/redirect_download is used. Default to 4 Hours.
+      ## key_expiry=14400
+
 ###########################################################################
 # Settings for the Azure lib
 ###########################################################################

+ 6 - 0
desktop/conf/pseudo-distributed.ini.tmpl

@@ -1303,6 +1303,9 @@
   # Flag to enable the extraction of a uploaded archive in HDFS.
   ## enable_extract_uploaded_archive=true
 
+  # Redirect client to WebHdfs or S3 for file download. Note: Turning this on will override notebook/redirect_whitelist for user selected file downloads on WebHdfs & S3.
+  ## redirect_download=true
+
 
 
 ###########################################################################
 # Settings to configure Pig
@@ -1582,6 +1585,9 @@
       # e.g. Use boto.s3.connection.OrdinaryCallingFormat for https://s3.amazonaws.com/<bucket-name>
       ## calling_format=boto.s3.connection.OrdinaryCallingFormat
 
+      # The time in seconds before a delegate key is expired. Used when filebrowser/redirect_download is used. Default to 4 Hours.
+      ## key_expiry=14400
+
 ###########################################################################
 # Settings for the Azure lib
 ###########################################################################

+ 1 - 1
desktop/core/src/desktop/middleware.py

@@ -673,7 +673,7 @@ class EnsureSafeRedirectURLMiddleware(object):
   Middleware to white list configured redirect URLs.
   """
   def process_response(self, request, response):
-    if response.status_code in (301, 302, 303, 305, 307, 308) and response.get('Location'):
+    if response.status_code in (301, 302, 303, 305, 307, 308) and response.get('Location') and not hasattr(response, 'redirect_override'):
       redirection_patterns = desktop.conf.REDIRECT_WHITELIST.get()
       location = response['Location']
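
A short sketch (assumption, not from the commit) of the opt-out contract this check creates: a view can exempt a single redirect from the whitelist by tagging the response, exactly as download() does above.

from django.http import HttpResponseRedirect

def storage_redirect(request, signed_url):  # hypothetical view
  response = HttpResponseRedirect(signed_url)
  # Marker attribute checked by EnsureSafeRedirectURLMiddleware; without it the
  # redirect target must match the configured redirect whitelist.
  setattr(response, 'redirect_override', True)
  return response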
 
 

+ 12 - 0
desktop/libs/aws/src/aws/conf.py

@@ -99,6 +99,12 @@ def get_default_region():
 
 
   return region
 
+def get_key_expiry():
+  if 'default' in AWS_ACCOUNTS:
+    return AWS_ACCOUNTS['default'].KEY_EXPIRY.get()
+  else:
+    return 86400
+
 
 
 AWS_ACCOUNTS = UnspecifiedConfigSection(
   'aws_accounts',
@@ -185,6 +191,12 @@ AWS_ACCOUNTS = UnspecifiedConfigSection(
         key='is_secure',
         default=True,
         type=coerce_bool
+      ),
+      KEY_EXPIRY=Config(
+        help=_('The time in seconds before a delegate key is expired. Used when filebrowser/redirect_download is used. Default to 4 Hours.'),
+        key='key_expiry',
+        default=14400,
+        type=int
       )
     )
   )
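
For reference (not part of the commit), the lookup behaviour of get_key_expiry() above:

from aws.conf import get_key_expiry

expiry = get_key_expiry()
# 14400 (4 hours) when a 'default' AWS account is configured and key_expiry is
# left at its default; a hard-coded 86400 (24 hours) when no 'default' account exists.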

+ 4 - 1
desktop/libs/aws/src/aws/s3/s3file.py

@@ -20,11 +20,11 @@ import errno
 
 
 from boto.s3.keyfile import KeyFile
 
+from aws.conf import get_key_expiry
 from aws.s3 import translate_s3_error
 
 DEFAULT_READ_SIZE = 1024 * 1024  # 1MB
 
-
 def open(key, mode='r'):
   if mode == 'r':
     return _ReadableS3File(key)
@@ -37,6 +37,9 @@ class _ReadableS3File(KeyFile):
       key_copy = key.bucket.get_key(key.name, validate=False)
       KeyFile.__init__(self, key_copy)
 
+  def read_url(self):
+    return self.getkey().generate_url(get_key_expiry())
+
   @translate_s3_error
   def read(self, length=DEFAULT_READ_SIZE):
     return KeyFile.read(self, length)
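
A rough standalone sketch (plain boto, hypothetical bucket and key names) of what read_url() hands back: a time-limited presigned URL the browser can fetch without going through Hue.

import boto

conn = boto.connect_s3()
key = conn.get_bucket('demo-bucket', validate=False).get_key('reports/data.csv', validate=False)
# Same call _ReadableS3File.read_url() makes; the link expires after key_expiry
# seconds (14400 by default).
url = key.generate_url(14400)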

+ 3 - 0
desktop/libs/azure/src/azure/adls/webhdfs.py

@@ -94,6 +94,9 @@ class WebHdfs(HadoopWebHdfs):
       "Authorization": self._auth_provider.get_token(),
       "Authorization": self._auth_provider.get_token(),
     }
     }
 
 
+  def is_web_accessible(self):
+    return False # Does not support OP=GETDELEGATIONTOKEN HADOOP-14579
+
   def get_upload_chuck_size(self):
     return UPLOAD_CHUCK_SIZE
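
Net effect (an inference from the File change at the bottom of this commit, not code in it): with is_web_accessible() returning False, ADLS handles never get a read_url bound, so ADLS downloads keep streaming through Hue even with redirect_download enabled.

fh = adls_fs.open('/clusters/demo/data.csv')  # hypothetical ADLS handle
hasattr(fh, 'read_url')                       # -> False, so no 302 redirect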
 
 

+ 29 - 1
desktop/libs/hadoop/src/hadoop/fs/webhdfs.py

@@ -25,6 +25,7 @@ import posixpath
 import stat
 import threading
 import time
+import urllib
 
 
 from urlparse import urlparse
 from django.utils.encoding import smart_str
@@ -522,6 +523,29 @@ class WebHdfs(Hdfs):
       if key.lower() == "path":
         return self.normpath(value)
 
+  def is_web_accessible(self):
+    return True
+
+  def read_url(self, path, offset=0, length=None, bufsize=None):
+    """
+    read(path, offset, length[, bufsize]) -> data
+
+    Read data from a file.
+    """
+    path = self.strip_normpath(path)
+    params = self._getparams()
+    params['op'] = 'OPEN'
+    params['offset'] = long(offset)
+    if length is not None:
+      params['length'] = long(length)
+    if bufsize is not None:
+      params['bufsize'] = bufsize
+    if self._security_enabled:
+      token = self.get_delegation_token(self.user)
+      if token:
+        params['delegation'] = token
+    quoted_path = urllib.quote(smart_str(path))
+    return self._client._make_url(quoted_path, params)
 
 
   def read(self, path, offset, length, bufsize=None):
     """
@@ -855,7 +879,7 @@ class WebHdfs(Hdfs):
     params['renewer'] = renewer
     headers = self._getheaders()
     res = self._root.get(params=params, headers=headers)
-    return res['Token']['urlString']
+    return res['Token'] and res['Token']['urlString']
 
 
 
 
   def do_as_user(self, username, fn, *args, **kwargs):
@@ -906,6 +930,10 @@ class File(object):
     self._path = fs_normpath(path)
     self._pos = 0
     self._mode = mode
+    if fs.is_web_accessible():
+      def read_url(fs=fs):
+        return fs.read_url(self._path, self._pos)
+      self.read_url = read_url
 
 
     try:
       self._stat = fs.stats(path)
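
Putting the pieces together, a sketch (illustrative host and path, not part of the commit) of what the new read_url() produces and how the capability surfaces on File objects:

fh = fs.open('/user/test/data.csv')   # hypothetical HDFS path
if hasattr(fh, 'read_url'):           # bound only when fs.is_web_accessible()
  url = fh.read_url()                 # closure over the file's path and current position
# url looks roughly like
#   http://namenode.example.com:50070/webhdfs/v1/user/test/data.csv?op=OPEN&offset=0&...
# with an extra &delegation=<token> parameter on a Kerberos-secured cluster, so the
# browser can fetch the bytes from WebHDFS directly instead of through Hue.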