
HUE-5506 [fb] Add unit test and small nits

krish · 9 years ago · commit 7f87b14f0e

+ 5 - 4
apps/filebrowser/src/filebrowser/templates/listdir.mako

@@ -94,6 +94,11 @@ ${ fb_components.menubar() }
                   <i class="fa fa-fw fa-pie-chart"></i> ${_('Summary')}
                 </a>
               </li>
+              % if ENABLE_EXTRACT_UPLOADED_ARCHIVE.get():
+                <li><a href="javascript: void(0)" title="${_('Compress selection into a single archive')}" data-bind="click: confirmCompressFiles, enable: selectedFiles().length > 0">
+                  <i class="fa fa-fw fa-file-archive-o"></i> ${_('Compress')}</a>
+                </li>
+              % endif
             </ul>
           </div>
           
@@ -125,10 +130,6 @@ ${ fb_components.menubar() }
                 <i class="fa fa-play"></i> ${_('Extract')}
               </button>
             % endif
-             <button class="btn compressFilesBtn" title="${_('Compress')}"
-               data-bind="visible: selectedFiles().length > 0, click: confirmCompressFiles">
-               <i class="fa fa-play"></i> ${_('Compress')}
-             </button>
           % endif
         </div>
       </%def>

+ 1 - 1
apps/filebrowser/src/filebrowser/views.py

@@ -1347,7 +1347,7 @@ def extract_archive_using_batch_job(request):
       try:
         response = extract_archive_in_hdfs(request, upload_path, archive_name)
       except Exception, e:
-        response['message'] = _('Exception occurred while compressing: %s' % e)
+        response['message'] = _('Exception occurred while extracting archive: %s' % e)
   else:
     response['message'] = _('ERROR: Configuration parameter enable_extract_uploaded_archive ' +
                             'has to be enabled before calling this method.')
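
The error path above gates on the same flag that now shows the Compress menu item in listdir.mako. For context, a minimal sketch of how such a flag is typically declared with Hue's desktop.lib.conf helpers; this declaration is hypothetical, the real one lives in the filebrowser conf module and may differ:

  from desktop.lib.conf import Config, coerce_bool

  # Hypothetical declaration for illustration only.
  ENABLE_EXTRACT_UPLOADED_ARCHIVE = Config(
    key='enable_extract_uploaded_archive',
    help='Enable extracting and compressing archives in HDFS via a batch job.',
    type=coerce_bool,
    default=False)

  # Both the mako template and the view read the live value with .get().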

+ 24 - 0
apps/filebrowser/src/filebrowser/views_test.py

@@ -925,6 +925,30 @@ alert("XSS")
     finally:
       cleanup_file(self.cluster, HDFS_ZIP_FILE)
 
+  def test_compress_hdfs_files(self):
+    ENABLE_EXTRACT_UPLOADED_ARCHIVE.set_for_testing(True)
+    prefix = self.cluster.fs_prefix + '/test_compress_files'
+    self.cluster.fs.mkdir(prefix)
+
+    test_dir1 = prefix + '/test_dir1'
+    self.cluster.fs.mkdir(test_dir1)
+    self.cluster.fs.chown(test_dir1, 'test')
+    self.cluster.fs.chmod(test_dir1, 0700)
+
+    test_dir2 = prefix + '/test_dir2'
+    self.cluster.fs.mkdir(test_dir2)
+    self.cluster.fs.chown(test_dir2, 'test')
+    self.cluster.fs.chmod(test_dir2, 0700)
+
+    try:
+      resp = self.c.post('/filebrowser/compress_files', {'upload_path': prefix, 'files[]': ['test_dir1','test_dir2']})
+      response = json.loads(resp.content)
+      assert_equal(0, response['status'], response)
+      assert_true('handle' in response and response['handle']['id'], response)
+    finally:
+      ENABLE_EXTRACT_UPLOADED_ARCHIVE.set_for_testing(False)
+      cleanup_tree(self.cluster, prefix)
+
   def test_upload_zip(self):
     prefix = self.cluster.fs_prefix + '/test_upload_zip'
     self.cluster.fs.mkdir(prefix)
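
The new test flips the flag with set_for_testing(True) and hard-codes set_for_testing(False) in the finally block. A sketch of a safer variant, assuming set_for_testing returns a restore callable as Hue's desktop.lib.conf setters do, so the pre-test value is put back instead of an unconditional False:

  # reset is a callable that restores whatever value was configured before the test.
  reset = ENABLE_EXTRACT_UPLOADED_ARCHIVE.set_for_testing(True)
  try:
    pass  # POST to /filebrowser/compress_files and assert on the JSON response
  finally:
    reset()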

+ 10 - 8
desktop/core/src/desktop/lib/tasks/compress_files/compress_in_hdfs.sh

@@ -17,6 +17,7 @@
 
 UPLOAD_PATH=
 FILE_NAMES=
+OUTPUT_PATH=
 
 function usage()
 {
@@ -25,6 +26,7 @@ function usage()
     echo "\t-h --help"
     echo "\t[-u | --upload-path]=<PATH_IN_HDFS>"
     echo "\t[-f | --file-names]=<FILE_NAMES>"
+    echo "\t[-o | --output-path]=<PATH_IN_HDFS>"
     echo ""
 }
 
@@ -42,6 +44,9 @@ while [ "$1" != "" ]; do
         -f | --file-names)
             FILE_NAMES=$VALUE
             ;;
+        -o | --output-path)
+            OUTPUT_PATH=$VALUE
+            ;;
         *)
             echo "ERROR: unknown parameter \"$PARAM\""
             usage
@@ -51,7 +56,7 @@ while [ "$1" != "" ]; do
     shift
 done
 
-if [ -z $UPLOAD_PATH ] || [ -z $FILE_NAMES ]
+if [ -z $UPLOAD_PATH ] || [ -z $FILE_NAMES ] || [ -z $OUTPUT_PATH ]
 then
 	echo "ERROR: Missing Arguments"
 	usage
@@ -61,9 +66,6 @@ fi
 FILE_NAMES=(${FILE_NAMES//,/ })
 exit_status=0
 
-# output directory inside HDFS upload dir
-filename_without_extension=$(echo $FILE_NAME | cut -f 1 -d '.')
-
 temp_output_dir=`mktemp -d 2>/dev/null || mktemp -d -t 'mytmpdir'`
 echo 'Created temporary output directory: '$temp_output_dir
 
@@ -74,14 +76,14 @@ exit_status=$(echo $?)
 set +x
 if [ $exit_status == 0 ]
 then
-	echo "Copying hue_compressed.zip to '$UPLOAD_PATH' in HDFS"
-	hadoop fs -put -f $temp_output_dir/hue_compressed.zip $UPLOAD_PATH
+	echo "Copying hue_compressed.zip to '$OUTPUT_PATH' in HDFS"
+	hadoop fs -put -f $temp_output_dir/hue_compressed.zip $OUTPUT_PATH
 	exit_status=$(echo $?)
 	if [ $exit_status == 0 ]
 	then
-	    echo "Copy to HDFS directory '$UPLOAD_PATH' complete!!!"
+	    echo "Copy to HDFS directory '$OUTPUT_PATH' complete!!!"
 	else
-	    echo "ERROR: Copy to HDFS directory '$UPLOAD_PATH' FAILED!!!"
+	    echo "ERROR: Copy to HDFS directory '$OUTPUT_PATH' FAILED!!!"
 	fi
 else
 	exit_status=1

+ 2 - 1
desktop/core/src/desktop/lib/tasks/compress_files/compress_utils.py

@@ -22,13 +22,14 @@ def compress_files_in_hdfs(request, file_names, upload_path):
 
   _upload_compress_files_script_to_hdfs(request.fs)
 
+  output_path = upload_path
   files = [{"value": upload_path + '/' + file_name} for file_name in file_names]
   files.append({'value': '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh'})
 
   shell_notebook = Notebook()
   shell_notebook.add_shell_snippet(
       shell_command='compress_files_in_hdfs.sh',
-      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + ','.join(file_names)}],
+      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + ','.join(file_names)}, {'value': '-o=' + output_path}],
       archives=[],
       files=files,
       env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}])
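
Taken together, the Python side still compresses in place (output_path = upload_path) but now passes the destination explicitly via -o, matching the script's new required flag. A hedged usage sketch with hypothetical paths, mirroring the new unit test:

  # compress_files_in_hdfs(request, file_names, upload_path) builds a shell
  # snippet roughly equivalent to:
  #   compress_files_in_hdfs.sh -u=/user/test/data -f=test_dir1,test_dir2 -o=/user/test/data
  compress_files_in_hdfs(request, ['test_dir1', 'test_dir2'], '/user/test/data')
  # On success the job copies hue_compressed.zip into the -o directory in HDFS.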