
HUE-5613 [fb] Set output path outside shell script for extract batch job

krish, 9 years ago
Commit d76e3805e7

+ 9 - 7
desktop/core/src/desktop/lib/tasks/extract_archive/extract_in_hdfs.sh

@@ -17,6 +17,7 @@
 
 UPLOAD_PATH=
 FILE_NAME=
+OUTPUT_PATH=
 
 function usage()
 {
@@ -42,6 +43,9 @@ while [ "$1" != "" ]; do
         -f | --file-name)
             FILE_NAME=$VALUE
             ;;
+        -o | --output-path)
+            OUTPUT_PATH=$VALUE
+            ;;
         *)
             echo "ERROR: unknown parameter \"$PARAM\""
             usage
@@ -51,7 +55,7 @@ while [ "$1" != "" ]; do
     shift
 done
 
-if [ -z $UPLOAD_PATH ] || [ -z $FILE_NAME ]
+if [ -z $UPLOAD_PATH ] || [ -z $FILE_NAME ] || [ -z $OUTPUT_PATH ]
 then
 	echo "ERROR: Missing Arguments"
 	usage
@@ -60,9 +64,6 @@ fi
 
 exit_status=0
 
-# output directory inside HDFS upload dir
-filename_without_extension=$(echo $FILE_NAME | cut -f 1 -d '.')
-
 temp_output_dir=`mktemp -d 2>/dev/null || mktemp -d -t 'mytmpdir'`
 echo 'Created temporary output directory: '$temp_output_dir
 
@@ -77,6 +78,7 @@ then
 	exit_status=$(echo $?)
 elif [[ $FILE_NAME =~ \.bz2$ ]] || [[ $FILE_NAME =~ \.bzip2$ ]]
 then
+	filename_without_extension=$(echo $FILE_NAME | cut -f 1 -d '.')
 	bzip2 -dc $FILE_NAME > $temp_output_dir/$filename_without_extension
 	exit_status=$(echo $?)
 else
@@ -88,10 +90,10 @@ set +x
 extracted_file_count=$(($(find $temp_output_dir/* -type d -maxdepth 0 | wc -l) + $(find $temp_output_dir/* -type f -maxdepth 0 | wc -l)))
 if [ $extracted_file_count != 0 ] && [ $exit_status == 0 ]
 then
-	echo 'Copying extracted files to '$UPLOAD_PATH/$filename_without_extension' in HDFS'
-	hadoop fs -put $temp_output_dir $UPLOAD_PATH/$filename_without_extension
+	echo "Copying extracted files to '$OUTPUT_PATH' in HDFS"
+	hadoop fs -put $temp_output_dir $OUTPUT_PATH
 	exit_status=$(echo $?)
-	echo 'Copy to HDFS directory '$UPLOAD_PATH/$filename_without_extension' complete!!!'
+	echo "Copy to HDFS directory '$OUTPUT_PATH' complete!!!"
 else
 	exit_status=1
 fi

+ 2 - 1
desktop/core/src/desktop/lib/tasks/extract_archive/extract_utils.py

@@ -26,6 +26,7 @@ from notebook.connectors.base import Notebook
 def extract_archive_in_hdfs(request, upload_path, file_name):
   _upload_extract_archive_script_to_hdfs(request.fs)
 
+  output_path = upload_path + '/' + file_name.split('.')[0]
   shell_notebook = Notebook(
       description=_('HDFS Extraction of %(upload_path)s/%(file_name)s') % {'upload_path': upload_path, 'file_name': file_name},
       isManaged=True
@@ -33,7 +34,7 @@ def extract_archive_in_hdfs(request, upload_path, file_name):
 
   shell_notebook.add_shell_snippet(
       shell_command='extract_archive_in_hdfs.sh',
-      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}],
+      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}, {'value': '-o=' + output_path}],
       archives=[],
       files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {"value": upload_path + '/' + file_name}],
       env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}]
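
For context, a minimal sketch of what the new argument wiring produces (the upload_path and file_name values are hypothetical examples, not taken from the commit; only the output_path derivation and the arguments format come from the diff above):

    # Hypothetical inputs, for illustration only
    upload_path = '/user/demo/uploads'
    file_name = 'logs.tar.gz'

    # The output directory is now derived in Python, outside the shell script
    output_path = upload_path + '/' + file_name.split('.')[0]   # '/user/demo/uploads/logs'

    # Arguments handed to the shell snippet: the script now receives its HDFS
    # destination explicitly via -o instead of computing it from FILE_NAME
    arguments = [
        {'value': '-u=' + upload_path},
        {'value': '-f=' + file_name},
        {'value': '-o=' + output_path},
    ]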