@@ -15,6 +15,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from future import standard_library
+standard_library.install_aliases()
+from builtins import object
+from builtins import str
 import errno
 import logging
 import mimetypes
@@ -24,12 +28,12 @@ import parquet
 import posixpath
 import re
 import stat as stat_module
-import urllib
-from urlparse import urlparse
+import urllib.request, urllib.parse, urllib.error
+from urllib.parse import urlparse
 
 from bz2 import decompress
 from datetime import datetime
-from cStringIO import StringIO
+from io import BytesIO, StringIO
 from gzip import GzipFile
 
 from django.contrib.auth.models import User, Group
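
Note: the four new imports at the top come from the python-future compatibility layer. `standard_library.install_aliases()` makes the Python 3 standard-library layout (`urllib.parse`, `urllib.request`, …) importable under Python 2 as well, which is what lets the `urllib.parse.quote`/`unquote` rewrites throughout this patch run on both interpreters, and `from builtins import str` gives Python 2 a Python 3-style text type under the name `str`. A minimal sketch of the idiom (illustrative only, not part of the patch):

```python
# Works on both Python 2 (via python-future aliases) and Python 3.
from future import standard_library
standard_library.install_aliases()

from urllib.parse import quote, unquote, urlparse

print(quote('/user/hüe', safe='/'))           # '/user/h%C3%BCe'
print(unquote('/user/h%C3%BCe'))              # '/user/hüe'
print(urlparse('hdfs://nn:8020/tmp').scheme)  # 'hdfs'
```
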
@@ -126,7 +130,7 @@ def download(request, path):
   This is inspired by django.views.static.serve.
   ?disposition={attachment, inline}
   """
-  decoded_path = urllib.unquote(path)
+  decoded_path = urllib.parse.unquote(path)
   if path != decoded_path:
     path = decoded_path
   if not SHOW_DOWNLOAD_BUTTON.get():
@@ -148,7 +152,7 @@ def download(request, path):
   # Verify read permissions on file first
   try:
     request.fs.read(path, offset=0, length=1)
-  except WebHdfsException, e:
+  except WebHdfsException as e:
     if e.code == 403:
       raise PopupException(_('User %s is not authorized to download file at path "%s"') %
                            (request.user.username, path))
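
Note: the recurring `except SomeError, e:` → `except SomeError as e:` rewrite in this patch is required because the comma form is Python 2-only syntax that Python 3 rejects at compile time; the `as` form parses on Python 2.6+ and 3 alike. A small illustration (values are made up):

```python
# Python 2 only (SyntaxError on Python 3):
#   except ValueError, e:
try:
    int('not a number')
except ValueError as e:  # valid on Python 2.6+ and Python 3
    print('parse failed: %s' % e)
# Caveat: on Python 3, 'e' is unbound again once the except block exits.
```
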
@@ -175,23 +179,23 @@ def download(request, path):
 
 def view(request, path):
   """Dispatches viewing of a path to either index() or fileview(), depending on type."""
-  decoded_path = urllib.unquote(path)
+  decoded_path = urllib.parse.unquote(path)
   if path != decoded_path:
     path = decoded_path
   # default_to_home is set in bootstrap.js
   if 'default_to_home' in request.GET:
     home_dir_path = request.user.get_home_directory()
     if request.fs.isdir(home_dir_path):
-      return format_preserving_redirect(request, '/filebrowser/view=' + urllib.quote(home_dir_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS))
+      return format_preserving_redirect(request, '/filebrowser/view=' + urllib.parse.quote(home_dir_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS))
 
-  # default_to_home is set in bootstrap.js
+  # default_to_trash is set in bootstrap.js
   if 'default_to_trash' in request.GET:
     home_trash_path = _home_trash_path(request.fs, request.user, path)
     if request.fs.isdir(home_trash_path):
-      return format_preserving_redirect(request, '/filebrowser/view=' + urllib.quote(home_trash_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS))
+      return format_preserving_redirect(request, '/filebrowser/view=' + urllib.parse.quote(home_trash_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS))
     trash_path = request.fs.trash_path(path)
     if request.fs.isdir(trash_path):
-      return format_preserving_redirect(request, '/filebrowser/view=' + urllib.quote(trash_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS))
+      return format_preserving_redirect(request, '/filebrowser/view=' + urllib.parse.quote(trash_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS))
 
   try:
     stats = request.fs.stats(path)
@@ -199,7 +203,7 @@ def view(request, path):
       return listdir_paged(request, path)
     else:
       return display(request, path)
-  except S3FileSystemException, e:
+  except S3FileSystemException as e:
     msg = _("S3 filesystem exception.")
     if request.is_ajax():
       exception = {
@@ -208,7 +212,7 @@ def view(request, path):
       return JsonResponse(exception)
     else:
       raise PopupException(msg, detail=e)
-  except (IOError, WebHdfsException), e:
+  except (IOError, WebHdfsException) as e:
     msg = _("Cannot access: %(path)s. ") % {'path': escape(path)}
 
     if "Connection refused" in e.message:
@@ -232,7 +236,7 @@ def _home_trash_path(fs, user, path):
 
 
 def home_relative_view(request, path):
-  decoded_path = urllib.unquote(path)
+  decoded_path = urllib.parse.unquote(path)
   if path != decoded_path:
     path = decoded_path
   home_dir_path = request.user.get_home_directory()
@@ -244,12 +248,12 @@ def home_relative_view(request, path):
 
 def edit(request, path, form=None):
   """Shows an edit form for the given path. Path does not necessarily have to exist."""
-  decoded_path = urllib.unquote(path)
+  decoded_path = urllib.parse.unquote(path)
   if path != decoded_path:
     path = decoded_path
   try:
     stats = request.fs.stats(path)
-  except IOError, ioe:
+  except IOError as ioe:
     # A file not found is OK, otherwise re-raise
     if ioe.errno == errno.ENOENT:
       stats = None
@@ -270,7 +274,7 @@ def edit(request, path, form=None):
     f = request.fs.open(path)
     try:
       try:
-        current_contents = unicode(f.read(), encoding)
+        current_contents = str(f.read(), encoding)
       except UnicodeDecodeError:
         raise PopupException(_("File is not encoded in %(encoding)s; cannot be edited: %(path)s.") % {'encoding': encoding, 'path': path})
     finally:
@@ -302,7 +306,7 @@ def save_file(request):
   form = EditorForm(request.POST)
   is_valid = form.is_valid()
   path = form.cleaned_data.get('path')
-  decoded_path = urllib.unquote(path)
+  decoded_path = urllib.parse.unquote(path)
   if path != decoded_path:
     path = decoded_path
 
@@ -325,9 +329,9 @@ def save_file(request):
       do_overwrite_save(request.fs, path, data)
     else:
       request.fs.create(path, overwrite=False, data=data)
-  except WebHdfsException, e:
+  except WebHdfsException as e:
    raise PopupException(_("The file could not be saved"), detail=e.message.splitlines()[0])
-  except Exception, e:
+  except Exception as e:
    raise PopupException(_("The file could not be saved"), detail=e)
 
  request.path = reverse("filebrowser_views_edit", kwargs=dict(path=path))
@@ -341,7 +345,7 @@ def parse_breadcrumbs(path):
     if url and not url.endswith('/'):
       url += '/'
     url += part
-    breadcrumbs.append({'url': urllib.quote(url.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS), 'label': part})
+    breadcrumbs.append({'url': urllib.parse.quote(url.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS), 'label': part})
   return breadcrumbs
 
 
@@ -351,7 +355,7 @@ def listdir(request, path):
 
   Intended to be called via view().
   """
-  decoded_path = urllib.unquote(path)
+  decoded_path = urllib.parse.unquote(path)
   if path != decoded_path:
     path = decoded_path
   if not request.fs.isdir(path):
@@ -369,8 +373,8 @@ def listdir(request, path):
     'path': path,
     'file_filter': file_filter,
     'breadcrumbs': breadcrumbs,
-    'current_dir_path': urllib.quote(path.encode('utf-8'), safe=SAFE_CHARACTERS_URI),
-    'current_request_path': '/filebrowser/view=' + urllib.quote(path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS),
+    'current_dir_path': urllib.parse.quote(path.encode('utf-8'), safe=SAFE_CHARACTERS_URI),
+    'current_request_path': '/filebrowser/view=' + urllib.parse.quote(path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS),
     'home_directory': home_dir_path if home_dir_path and request.fs.isdir(home_dir_path) else None,
     'cwd_set': True,
     'is_superuser': request.user.username == request.fs.superuser,
@@ -433,7 +437,7 @@ def listdir_paged(request, path):
     filter=? - Specify a substring filter to search for in
                the filename field.
   """
-  decoded_path = urllib.unquote(path)
+  decoded_path = urllib.parse.unquote(path)
   if path != decoded_path:
     path = decoded_path
   if not request.fs.isdir(path):
@@ -462,7 +466,7 @@ def listdir_paged(request, path):
   # Filter first
   filter_str = request.GET.get('filter', None)
   if filter_str:
-    filtered_stats = filter(lambda sb: filter_str in sb['name'], all_stats)
+    filtered_stats = [sb for sb in all_stats if filter_str in sb['name']]
     all_stats = filtered_stats
 
   # Sort next
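
Note: the `filter()` → list-comprehension change just above is more than cosmetic. On Python 3 `filter()` returns a lazy iterator, so downstream code that takes `len()`, indexes, or iterates twice would break; the comprehension keeps the eager-list semantics Python 2 had. Sketch with made-up stats:

```python
all_stats = [{'name': 'logs'}, {'name': 'data'}, {'name': 'logs.bak'}]
filter_str = 'logs'

lazy = filter(lambda sb: filter_str in sb['name'], all_stats)  # iterator on Python 3
eager = [sb for sb in all_stats if filter_str in sb['name']]   # list on both

print(len(eager))  # 2
# len(lazy) raises TypeError on Python 3, and the iterator is exhausted after one pass.
```
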
@@ -516,7 +520,7 @@ def listdir_paged(request, path):
   data = {
     'path': path,
     'breadcrumbs': breadcrumbs,
-    'current_request_path': '/filebrowser/view=' + urllib.quote(path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS),
+    'current_request_path': '/filebrowser/view=' + urllib.parse.quote(path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS),
     'is_trash_enabled': is_trash_enabled,
     'files': page.object_list if page else [],
     'page': _massage_page(page, paginator) if page else {},
@@ -526,14 +530,14 @@ def listdir_paged(request, path):
     # The following should probably be deprecated
     'cwd_set': True,
     'file_filter': 'any',
-    'current_dir_path': urllib.quote(path.encode('utf-8'), safe=SAFE_CHARACTERS_URI),
+    'current_dir_path': urllib.parse.quote(path.encode('utf-8'), safe=SAFE_CHARACTERS_URI),
     'is_fs_superuser': is_fs_superuser,
     'groups': is_fs_superuser and [str(x) for x in Group.objects.values_list('name', flat=True)] or [],
     'users': is_fs_superuser and [str(x) for x in User.objects.values_list('username', flat=True)] or [],
     'superuser': request.fs.superuser,
     'supergroup': request.fs.supergroup,
     'is_sentry_managed': request.fs.is_sentry_managed(path),
-    'apps': appmanager.get_apps_dict(request.user).keys(),
+    'apps': list(appmanager.get_apps_dict(request.user).keys()),
     'show_download_button': SHOW_DOWNLOAD_BUTTON.get(),
     'show_upload_button': SHOW_UPLOAD_BUTTON.get(),
     'is_embeddable': request.GET.get('is_embeddable', False),
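
Note: likewise, `dict.keys()` returns a view object on Python 3 rather than a list, so wherever the result may be serialized or indexed it is wrapped in `list()`, as with the `apps` entry above. Sketch (the apps dict is hypothetical):

```python
import json

apps = {'filebrowser': 1, 'jobbrowser': 2}

# json.dumps(apps.keys()) raises TypeError on Python 3 (dict_keys is not serializable).
print(json.dumps(list(apps.keys())))  # ["filebrowser", "jobbrowser"]
```
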
@@ -567,7 +571,7 @@ def _massage_stats(request, stats):
     'type': filetype(stats['mode']),
     'rwx': rwx(stats['mode'], stats['aclBit']),
     'mode': stringformat(stats['mode'], "o"),
-    'url': '/filebrowser/view=' + urllib.quote(normalized.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS),
+    'url': '/filebrowser/view=' + urllib.parse.quote(normalized.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS),
     'is_sentry_managed': request.fs.is_sentry_managed(path)
   }
 
@@ -579,7 +583,7 @@ def stat(request, path):
   Intended for use via AJAX (and hence doesn't provide
   an HTML view).
   """
-  decoded_path = urllib.unquote(path)
+  decoded_path = urllib.parse.unquote(path)
   if path != decoded_path:
     path = decoded_path
   if not request.fs.exists(path):
@@ -589,7 +593,7 @@ def stat(request, path):
 
 
 def content_summary(request, path):
-  decoded_path = urllib.unquote(path)
+  decoded_path = urllib.parse.unquote(path)
   if path != decoded_path:
     path = decoded_path
   if not request.fs.exists(path):
@@ -601,7 +605,7 @@ def content_summary(request, path):
     stats.summary.update({'replication': replication_factor})
     response['status'] = 0
     response['summary'] = stats.summary
-  except WebHdfsException, e:
+  except WebHdfsException as e:
     response['message'] = _("The file could not be saved") + e.message.splitlines()[0]
   return JsonResponse(response)
 
@@ -621,7 +625,7 @@ def display(request, path):
   sequence files, decompress gzipped text files, etc.).
   There exists a python-magic package to interface with libmagic.
   """
-  decoded_path = urllib.unquote(path)
+  decoded_path = urllib.parse.unquote(path)
   if path != decoded_path:
     path = decoded_path
   if not request.fs.isfile(path):
@@ -679,7 +683,7 @@ def display(request, path):
   # Get contents as string for text mode, or at least try
   uni_contents = None
   if not mode or mode == 'text':
-    uni_contents = unicode(contents, encoding, errors='replace')
+    uni_contents = str(contents, encoding, errors='replace')
     is_binary = uni_contents.find(i18n.REPLACEMENT_CHAR) != -1
     # Auto-detect mode
     if not mode:
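
Note: `unicode(bytes, encoding)` has no Python 3 builtin by that name; `str(bytes, encoding)` decodes there, and `from builtins import str` makes the same spelling work on Python 2. The two call sites differ deliberately: `edit()` lets `UnicodeDecodeError` escape and turns it into a `PopupException`, while `display()` passes `errors='replace'` and then probes for the replacement character to detect binary data. Sketch:

```python
raw = b'caf\xc3\xa9'

print(str(raw, 'utf-8'))                         # 'café'
print(str(raw[:-1], 'utf-8', errors='replace'))  # 'caf\ufffd' (U+FFFD replacement char)
# str(raw[:-1], 'utf-8') raises UnicodeDecodeError instead.
```
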
@@ -743,7 +747,7 @@ def read_contents(codec_type, path, fs, offset, length):
   """
   contents = ''
   fhandle = None
-  decoded_path = urllib.unquote(path)
+  decoded_path = urllib.parse.unquote(path)
   if path != decoded_path:
     path = decoded_path
   try:
@@ -798,7 +802,7 @@ def _decompress_snappy(compressed_content):
   try:
     import snappy
     return snappy.decompress(compressed_content)
-  except Exception, e:
+  except Exception as e:
     raise PopupException(_('Failed to decompress snappy compressed file.'), detail=e)
 
 
@@ -833,7 +837,7 @@ def _read_avro(fhandle, path, offset, length, stats):
     data_file_reader.close()
 
     contents = "".join(contents_list)
-  except Exception, e:
+  except Exception as e:
     logging.exception('Could not read avro file at "%s": %s' % (path, e))
     raise PopupException(_("Failed to read Avro file."))
   return contents
@@ -848,7 +852,7 @@ def _read_parquet(fhandle, path, offset, length, stats):
     parquet._dump(data, ParquetOptions(limit=1000), out=dumped_data)
     dumped_data.seek(offset)
     return dumped_data.read()
-  except Exception, e:
+  except Exception as e:
     logging.exception('Could not read parquet file at "%s": %s' % (path, e))
     raise PopupException(_("Failed to read Parquet file."))
 
@@ -859,7 +863,7 @@ def _read_gzip(fhandle, path, offset, length, stats):
     raise PopupException(_("Offsets are not supported with Gzip compression."))
   try:
-    contents = GzipFile('', 'r', 0, StringIO(fhandle.read())).read(length)
-  except Exception, e:
+    contents = GzipFile('', 'r', 0, BytesIO(fhandle.read())).read(length)
+  except Exception as e:
     logging.exception('Could not decompress file at "%s": %s' % (path, e))
     raise PopupException(_("Failed to decompress file."))
   return contents
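
Note: the gzip hunk above is the one spot where the `cStringIO` → `io` move needs care. `fhandle.read()` yields bytes, and `io.StringIO` only accepts text (on Python 2 as well as 3), so the in-memory buffer handed to `GzipFile` has to be `io.BytesIO`; hence `BytesIO` in the import hunk. Sketch:

```python
import gzip
from io import BytesIO

payload = gzip.compress(b'hello filebrowser')

# StringIO(payload) would raise TypeError: initial_value must be str, not bytes.
contents = gzip.GzipFile('', 'r', 0, BytesIO(payload)).read()
print(contents)  # b'hello filebrowser'
```
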
@@ -869,7 +873,7 @@ def _read_bz2(fhandle, path, offset, length, stats):
   contents = ''
   try:
     contents = decompress(fhandle.read(length))
-  except Exception, e:
+  except Exception as e:
     logging.exception('Could not decompress file at "%s": %s' % (path, e))
     raise PopupException(_("Failed to decompress file."))
   return contents
@@ -880,7 +884,7 @@ def _read_simple(fhandle, path, offset, length, stats):
   try:
     fhandle.seek(offset)
     contents = fhandle.read(length)
-  except Exception, e:
+  except Exception as e:
     logging.exception('Could not read file at "%s": %s' % (path, e))
     raise PopupException(_("Failed to read file."))
   return contents
@@ -1082,17 +1086,17 @@ def generic_op(form_class, request, op, parameter_names, piggyback=None, templat
     args = arg_extractor(request, form, parameter_names)
     try:
       op(*args)
-    except (IOError, WebHdfsException), e:
+    except (IOError, WebHdfsException) as e:
       msg = _("Cannot perform operation.")
       # TODO: Only apply this message for HDFS
       if is_admin(request.user) and not _is_hdfs_superuser(request):
         msg += _(' Note: you are a Hue admin but not a HDFS superuser, "%(superuser)s" or part of HDFS supergroup, "%(supergroup)s".') \
             % {'superuser': request.fs.superuser, 'supergroup': request.fs.supergroup}
       raise PopupException(msg, detail=e)
-    except S3FileSystemException, e:
+    except S3FileSystemException as e:
       msg = _("S3 filesystem exception.")
       raise PopupException(msg, detail=e)
-    except NotImplementedError, e:
+    except NotImplementedError as e:
       msg = _("Cannot perform operation.")
       raise PopupException(msg, detail=e)
 
@@ -1105,7 +1109,7 @@ def generic_op(form_class, request, op, parameter_names, piggyback=None, templat
       if piggyback:
         piggy_path = form.cleaned_data.get(piggyback)
         ret["result"] = _massage_stats(request, stat_absolute_path(piggy_path ,request.fs.stats(piggy_path)))
-    except Exception, e:
+    except Exception as e:
       # Hard to report these more naturally here. These happen either
       # because of a bug in the piggy-back code or because of a
       # race condition.
@@ -1132,7 +1136,7 @@ def rename(request):
      raise PopupException(_("Could not rename folder \"%s\" to \"%s\": Hashes are not allowed in filenames." % (src_path, dest_path)))
    if "/" not in dest_path:
      src_dir = os.path.dirname(src_path)
-      dest_path = request.fs.join(urllib.unquote(src_dir), urllib.unquote(dest_path))
+      dest_path = request.fs.join(urllib.parse.unquote(src_dir), urllib.parse.unquote(dest_path))
    if request.fs.exists(dest_path):
      raise PopupException(_('The destination path "%s" already exists.') % dest_path)
    request.fs.rename(src_path, dest_path)
@@ -1141,7 +1145,7 @@
 
 def set_replication(request):
   def smart_set_replication(src_path, replication_factor):
-    result = request.fs.set_replication(urllib.unquote(src_path), replication_factor)
+    result = request.fs.set_replication(urllib.parse.unquote(src_path), replication_factor)
     if not result:
       raise PopupException(_("Setting of replication factor failed"))
 
@@ -1154,7 +1158,7 @@ def mkdir(request):
     # No absolute directory specification allowed.
     if posixpath.sep in name or "#" in name:
       raise PopupException(_("Could not name folder \"%s\": Slashes or hashes are not allowed in filenames." % name))
-    request.fs.mkdir(request.fs.join(urllib.unquote(path), urllib.unquote(name)))
+    request.fs.mkdir(request.fs.join(urllib.parse.unquote(path), urllib.parse.unquote(name)))
 
   return generic_op(MkDirForm, request, smart_mkdir, ["path", "name"], "path")
 
@@ -1164,7 +1168,7 @@ def touch(request):
     # No absolute path specification allowed.
     if posixpath.sep in name:
       raise PopupException(_("Could not name file \"%s\": Slashes are not allowed in filenames." % name))
-    request.fs.create(request.fs.join(urllib.unquote(path), urllib.unquote(name)))
+    request.fs.create(request.fs.join(urllib.parse.unquote(path), urllib.parse.unquote(name)))
 
   return generic_op(TouchForm, request, smart_touch, ["path", "name"], "path")
 
@@ -1174,7 +1178,7 @@ def rmtree(request):
   params = ["path"]
   def bulk_rmtree(*args, **kwargs):
     for arg in args:
-      request.fs.do_as_user(request.user, request.fs.rmtree, urllib.unquote(arg['path']), 'skip_trash' in request.GET)
+      request.fs.do_as_user(request.user, request.fs.rmtree, urllib.parse.unquote(arg['path']), 'skip_trash' in request.GET)
   return generic_op(RmTreeFormSet, request, bulk_rmtree, ["path"], None,
                     data_extractor=formset_data_extractor(recurring, params),
                     arg_extractor=formset_arg_extractor,
@@ -1189,7 +1193,7 @@ def move(request):
     for arg in args:
       if arg['src_path'] == arg['dest_path']:
         raise PopupException(_('Source path and destination path cannot be same'))
-      request.fs.rename(urllib.unquote(arg['src_path']), urllib.unquote(arg['dest_path']))
+      request.fs.rename(urllib.parse.unquote(arg['src_path']), urllib.parse.unquote(arg['dest_path']))
   return generic_op(RenameFormSet, request, bulk_move, ["src_path", "dest_path"], None,
                     data_extractor=formset_data_extractor(recurring, params),
                     arg_extractor=formset_arg_extractor,
@@ -1204,7 +1208,7 @@ def copy(request):
     for arg in args:
       if arg['src_path'] == arg['dest_path']:
         raise PopupException(_('Source path and destination path cannot be same'))
-      request.fs.copy(urllib.unquote(arg['src_path']), urllib.unquote(arg['dest_path']), recursive=True, owner=request.user)
+      request.fs.copy(urllib.parse.unquote(arg['src_path']), urllib.parse.unquote(arg['dest_path']), recursive=True, owner=request.user)
   return generic_op(CopyFormSet, request, bulk_copy, ["src_path", "dest_path"], None,
                     data_extractor=formset_data_extractor(recurring, params),
                     arg_extractor=formset_arg_extractor,
@@ -1218,7 +1222,7 @@ def chmod(request):
   def bulk_chmod(*args, **kwargs):
     op = curry(request.fs.chmod, recursive=request.POST.get('recursive', False))
     for arg in args:
-      op(urllib.unquote(arg['path']), arg['mode'])
+      op(urllib.parse.unquote(arg['path']), arg['mode'])
   # mode here is abused: on input, it's a string, but when retrieved,
   # it's an int.
   return generic_op(ChmodFormSet, request, bulk_chmod, ['path', 'mode'], "path",
@@ -1243,7 +1247,7 @@ def chown(request):
   def bulk_chown(*args, **kwargs):
     op = curry(request.fs.chown, recursive=request.POST.get('recursive', False))
     for arg in args:
-      varg = [urllib.unquote(arg[param]) for param in param_names]
+      varg = [urllib.parse.unquote(arg[param]) for param in param_names]
       op(*varg)
 
   return generic_op(ChownFormSet, request, bulk_chown, param_names, "path",
@@ -1258,7 +1262,7 @@ def trash_restore(request):
   params = ["path"]
   def bulk_restore(*args, **kwargs):
     for arg in args:
-      request.fs.do_as_user(request.user, request.fs.restore, urllib.unquote(arg['path']))
+      request.fs.do_as_user(request.user, request.fs.restore, urllib.parse.unquote(arg['path']))
   return generic_op(RestoreFormSet, request, bulk_restore, ["path"], None,
                     data_extractor=formset_data_extractor(recurring, params),
                     arg_extractor=formset_arg_extractor,
@@ -1283,7 +1287,7 @@ def upload_file(request):
   try:
     resp = _upload_file(request)
     response.update(resp)
-  except Exception, ex:
+  except Exception as ex:
     response['data'] = str(ex).split('\n', 1)[0]
   hdfs_file = request.FILES.get('hdfs_file')
   if hdfs_file and hasattr(hdfs_file, 'remove'): # TODO: Call from proxyFS
@@ -1307,7 +1311,7 @@ def _upload_file(request):
 
   if form.is_valid():
     uploaded_file = request.FILES['hdfs_file']
-    dest = scheme_absolute_path(urllib.unquote(request.GET['dest']), urllib.unquote(form.cleaned_data['dest']))
+    dest = scheme_absolute_path(urllib.parse.unquote(request.GET['dest']), urllib.parse.unquote(form.cleaned_data['dest']))
     filepath = request.fs.join(dest, uploaded_file.name)
 
     if request.fs.isdir(dest) and posixpath.sep in uploaded_file.name:
@@ -1317,7 +1321,7 @@ def _upload_file(request):
       request.fs.upload(file=uploaded_file, path=dest, username=request.user.username)
       response['status'] = 0
 
-    except IOError, ex:
+    except IOError as ex:
      already_exists = False
      try:
        already_exists = request.fs.exists(dest)
@@ -1353,7 +1357,7 @@ def extract_archive_using_batch_job(request):
-      upload_path = urllib.unquote(upload_path)
-      archive_name = urllib.unquote(archive_name)
+      upload_path = urllib.parse.unquote(upload_path)
+      archive_name = urllib.parse.unquote(archive_name)
       response = extract_archive_in_hdfs(request, upload_path, archive_name)
-    except Exception, e:
+    except Exception as e:
       response['message'] = _('Exception occurred while extracting archive: %s' % e)
   else:
     response['message'] = _('ERROR: Configuration parameter enable_extract_uploaded_archive ' +
@@ -1377,7 +1381,7 @@ def compress_files_using_batch_job(request):
-      archive_name = urllib.unquote(archive_name)
-      file_names = [urllib.unquote(name) for name in file_names]
+      archive_name = urllib.parse.unquote(archive_name)
+      file_names = [urllib.parse.unquote(name) for name in file_names]
       response = compress_files_in_hdfs(request, file_names, upload_path, archive_name)
-    except Exception, e:
+    except Exception as e:
      response['message'] = _('Exception occurred while compressing files: %s' % e)
   else:
     response['message'] = _('Error: Output directory is not set.');