Browse files

HUE-6101 [fb] Archives with space in the name cannot be extracted using batch job

Oozie doesn't accept arguments containing spaces inside the <file> tag,
so we need to replace spaces with '%20'.
krish 8 years ago
parent
revision
36727ebea8
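
For reference, a minimal sketch (mine, not part of the commit) of the encoding the message refers to, assuming the Python 2 urllib used elsewhere in this change:

    import urllib

    # urllib.quote percent-encodes the space as '%20'; letters, digits and '.' pass through.
    file_name = 'te st.zip'
    print urllib.quote(file_name)  # prints: te%20st.zip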

BIN
apps/filebrowser/src/filebrowser/test_data/te st.zip


+ 4 - 3
apps/filebrowser/src/filebrowser/views_test.py

@@ -921,8 +921,8 @@ alert("XSS")
 
     USER_NAME = 'test'
     HDFS_DEST_DIR = prefix + "/tmp/fb-upload-test"
-    ZIP_FILE = os.path.realpath('apps/filebrowser/src/filebrowser/test_data/test.zip')
-    HDFS_ZIP_FILE = HDFS_DEST_DIR + '/test.zip'
+    ZIP_FILE = os.path.realpath('apps/filebrowser/src/filebrowser/test_data/te st.zip')
+    HDFS_ZIP_FILE = HDFS_DEST_DIR + '/te st.zip'
     try:
       self.cluster.fs.mkdir(HDFS_DEST_DIR)
       self.cluster.fs.chown(HDFS_DEST_DIR, USER_NAME)
@@ -936,10 +936,11 @@ alert("XSS")
       assert_true(self.cluster.fs.exists(HDFS_ZIP_FILE))
 
       resp = self.c.post('/filebrowser/extract_archive',
-                         dict(upload_path=HDFS_DEST_DIR, archive_name='test.zip'))
+                         dict(upload_path=HDFS_DEST_DIR, archive_name='te st.zip'))
       response = json.loads(resp.content)
       assert_equal(0, response['status'], response)
       assert_true('handle' in response and response['handle']['id'], response)
+
     finally:
       cleanup_file(self.cluster, HDFS_ZIP_FILE)
 

+ 9 - 9
desktop/core/src/desktop/lib/tasks/extract_archive/extract_in_hdfs.sh

@@ -55,7 +55,7 @@ while [ "$1" != "" ]; do
     shift
 done
 
-if [ -z $UPLOAD_PATH ] || [ -z $FILE_NAME ] || [ -z OUTPUT_PATH ]
+if [ -z "$UPLOAD_PATH" ] || [ -z "$FILE_NAME" ] || [ -z "$OUTPUT_PATH" ]
 then
 	echo "ERROR: Missing Arguments"
 	usage
@@ -68,18 +68,18 @@ temp_output_dir=`mktemp -d 2>/dev/null || mktemp -d -t 'mytmpdir'`
 echo 'Created temporary output directory: '$temp_output_dir
 
 set -x
-if [[ $FILE_NAME =~ \.zip$ ]]
+if [[ "$FILE_NAME" =~ \.zip$ ]]
 then
-	unzip $FILE_NAME -d $temp_output_dir
+	unzip "$FILE_NAME" -d $temp_output_dir
 	exit_status=$(echo $?)
-elif [[ $FILE_NAME =~ \.tar\.gz$ ]] || [[ $FILE_NAME =~ \.tgz$ ]]
+elif [[ "$FILE_NAME" =~ \.tar\.gz$ ]] || [[ "$FILE_NAME" =~ \.tgz$ ]]
 then
-	tar -xvzf $FILE_NAME -C $temp_output_dir
+	tar -xvzf "$FILE_NAME" -C $temp_output_dir
 	exit_status=$(echo $?)
-elif [[ $FILE_NAME =~ \.bz2$ ]] || [[ $FILE_NAME =~ \.bzip2$ ]]
+elif [[ "$FILE_NAME" =~ \.bz2$ ]] || [[ "$FILE_NAME" =~ \.bzip2$ ]]
 then
-	filename_without_extension=$(echo $FILE_NAME | cut -f 1 -d '.')
-	bzip2 -dc $FILE_NAME > $temp_output_dir/$filename_without_extension
+	filename_without_extension=$(echo "$FILE_NAME" | cut -f 1 -d '.')
+	bzip2 -dc "$FILE_NAME" > $temp_output_dir/"$filename_without_extension"
 	exit_status=$(echo $?)
 else
 	echo 'ERROR: Could not interpret archive type.'
@@ -91,7 +91,7 @@ extracted_file_count=$(($(find $temp_output_dir/* -type d -maxdepth 0 | wc -l) +
 if [ $extracted_file_count != 0 ] && [ $exit_status == 0 ]
 then
 	echo "Copying extracted files to '$OUTPUT_PATH' in HDFS"
-	hadoop fs -put $temp_output_dir $OUTPUT_PATH
+	hadoop fs -put $temp_output_dir "$OUTPUT_PATH"
 	exit_status=$(echo $?)
 	echo "Copy to HDFS directory '$OUTPUT_PATH' complete!!!"
 else
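
The added quotes matter because the shell word-splits unquoted variables, so a value like 'te st.zip' becomes two arguments and the unzip/tar calls fail. A hypothetical Python illustration of the same splitting behaviour (the file names here are made up):

    import subprocess

    # Through a shell, the unquoted space splits the name into two words,
    # so this looks for files named 'te' and 'st.zip' (analogous to unzip $FILE_NAME).
    subprocess.call('ls te st.zip', shell=True)

    # As a single list element the name stays one argument
    # (analogous to unzip "$FILE_NAME").
    subprocess.call(['ls', 'te st.zip'])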

+ 3 - 1
desktop/core/src/desktop/lib/tasks/extract_archive/extract_utils.py

@@ -15,6 +15,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import urllib
+
 from django.core.urlresolvers import reverse
 from django.utils.translation import ugettext as _
 
@@ -39,7 +41,7 @@ def extract_archive_in_hdfs(request, upload_path, file_name):
       shell_command='extract_archive_in_hdfs.sh',
       arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}, {'value': '-o=' + output_path}],
       archives=[],
-      files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {"value": upload_path + '/' + file_name}],
+      files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {"value": upload_path + '/' + urllib.quote(file_name)}],
       env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}]
   )
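
With urllib.quote applied, the <file> entry handed to Oozie no longer contains a raw space. A minimal sketch with made-up example values:

    import urllib

    # Example values only; the real ones come from the request.
    upload_path = '/user/test/tmp/fb-upload-test'
    file_name = 'te st.zip'

    # The shell action still gets the raw name via '-f=', but the <file> entry
    # is percent-encoded so Oozie accepts it.
    print upload_path + '/' + urllib.quote(file_name)
    # /user/test/tmp/fb-upload-test/te%20st.zip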