HUE-8890 [fb] Fix compress/extract with files with spaces

Jean-Francois Desjeans Gauthier, 6 years ago
parent commit 5ae61cc38d

+ 5 - 0
apps/filebrowser/src/filebrowser/views.py

@@ -1350,6 +1350,8 @@ def extract_archive_using_batch_job(request):

     if upload_path and archive_name:
       try:
+        upload_path = urllib.unquote(upload_path)
+        archive_name = urllib.unquote(archive_name)
         response = extract_archive_in_hdfs(request, upload_path, archive_name)
       except Exception, e:
         response['message'] = _('Exception occurred while extracting archive: %s' % e)
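
The frontend submits these paths percent-encoded, so the view now decodes them with urllib.unquote before handing them to the batch job. A minimal Python 2 sketch of the decoding step, with hypothetical values:

    import urllib

    # Percent-encoded values as a browser would send them (hypothetical).
    upload_path = urllib.unquote('/user/hue/my%20dir')   # '/user/hue/my dir'
    archive_name = urllib.unquote('my%20archive.zip')    # 'my archive.zip'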
@@ -1371,6 +1373,9 @@ def compress_files_using_batch_job(request):

     if upload_path and file_names and archive_name:
       try:
+        upload_path = urllib.unquote(upload_path)
+        archive_name = urllib.unquote(archive_name)
+        file_names = [urllib.unquote(name) for name in file_names]
         response = compress_files_in_hdfs(request, file_names, upload_path, archive_name)
       except Exception, e:
         response['message'] = _('Exception occurred while compressing files: %s' % e)
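
The compress path applies the same decoding to every entry in file_names. A sketch under the same assumptions:

    import urllib

    file_names = ['report%201.csv', 'data%20set.json']   # hypothetical input
    file_names = [urllib.unquote(name) for name in file_names]
    # -> ['report 1.csv', 'data set.json']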

+ 6 - 5
desktop/core/src/desktop/lib/tasks/compress_files/compress_in_hdfs.sh

@@ -42,7 +42,7 @@ while [ "$1" != "" ]; do
             UPLOAD_PATH=$VALUE
             ;;
         -f | --file-names)
-            FILE_NAMES=$VALUE
+            FILE_NAMES="$VALUE"
             ;;
         -n | --archive-name)
             ARCHIVE_NAME=$VALUE
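
Quoting "$VALUE" matters because an unquoted shell expansion is word-split on spaces. A Python 2 illustration of that behavior using shlex (the argument strings are made up):

    import shlex

    # Unquoted: one file name containing a space becomes two words.
    print shlex.split('-f my file.zip')      # ['-f', 'my', 'file.zip']
    # Quoted: the name survives as a single argument.
    print shlex.split('-f "my file.zip"')    # ['-f', 'my file.zip']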
@@ -63,21 +63,22 @@ then
 	exit 1
 fi

-FILE_NAMES=(${FILE_NAMES//,/ })
+FILE_NAMES=("${FILE_NAMES//,/ }")
 exit_status=0

 temp_output_dir=`mktemp -d 2>/dev/null || mktemp -d -t 'mytmpdir'`
 echo 'Created temporary output directory: '$temp_output_dir

 set -x
-zip -r $temp_output_dir/$ARCHIVE_NAME ${FILE_NAMES[@]}
+zip -r "$temp_output_dir/$ARCHIVE_NAME" "${FILE_NAMES[@]}"
 exit_status=$(echo $?)

 set +x
 if [ $exit_status == 0 ]
 then
-	echo "Copying hue_compressed.zip to '$UPLOAD_PATH' in HDFS"
-	hadoop fs -put -f $temp_output_dir/$ARCHIVE_NAME $UPLOAD_PATH
+	encoded_output_dir=`python -c "import urllib;print urllib.quote(raw_input())" <<< "$temp_output_dir/$ARCHIVE_NAME"`
+	echo "Copying $encoded_output_dir to '$UPLOAD_PATH' in HDFS"
+	hadoop fs -put -f $encoded_output_dir "$UPLOAD_PATH"
 	exit_status=$(echo $?)
 	if [ $exit_status == 0 ]
 	then
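
The inline `python -c` helper percent-encodes the local path before `hadoop fs -put`, since an unquoted expansion would otherwise split on the space. What it computes, sketched with a hypothetical temp path:

    import urllib

    path = '/tmp/mytmpdir/my archive.zip'   # hypothetical $temp_output_dir/$ARCHIVE_NAME
    print urllib.quote(path)                # '/tmp/mytmpdir/my%20archive.zip'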

+ 12 - 5
desktop/core/src/desktop/lib/tasks/extract_archive/extract_in_hdfs.sh

@@ -87,17 +87,24 @@ else
 fi

 set +x
+echo "file_count $temp_output_dir/*"
 extracted_file_count=$(($(find $temp_output_dir/* -type d -maxdepth 0 | wc -l) + $(find $temp_output_dir/* -type f -maxdepth 0 | wc -l)))
 if [ $extracted_file_count != 0 ] && [ $exit_status == 0 ]
 then
-    if ! $(hadoop fs -test -d $OUTPUT_PATH)
+    echo "test $OUTPUT_PATH"
+    if ! $(hadoop fs -test -d "$OUTPUT_PATH")
     then
         echo "Creating output directory '$OUTPUT_PATH' in HDFS"
-        hadoop fs -mkdir $OUTPUT_PATH
+        hadoop fs -mkdir "$OUTPUT_PATH"
     fi
-	echo "Copying extracted files to '$OUTPUT_PATH' in HDFS"
-	hadoop fs -put $temp_output_dir/* "$OUTPUT_PATH"
-	exit_status=$(echo $?)
+  readarray -t files <<<"$(ls $temp_output_dir)"
+  for file in "${files[@]}"
+  do
+    encoded_output_dir=`python -c "import urllib;print urllib.quote(raw_input())" <<< "$file"`
+    echo "$temp_output_dir/$encoded_output_dir '$OUTPUT_PATH'"
+    hadoop fs -put $temp_output_dir/$encoded_output_dir "$OUTPUT_PATH"
+  done
+  exit_status=$(echo $?)
 	if [ $exit_status != 0 ]
 	then
 	    echo "Failed to copy files to HDFS directory '$OUTPUT_PATH'."