
HUE-5202 Unit test for validating extract_archive_in_hdfs()

krish 9 years ago
Commit 7dbd1e6

+ 1 - 0
apps/filebrowser/src/filebrowser/forms.py

@@ -111,6 +111,7 @@ class UploadFileForm(forms.Form):
   # The "hdfs" prefix in "hdfs_file" triggers the HDFSfileUploadHandler
   hdfs_file = FileField(forms.Form, label=_("File to Upload"))
   dest = PathField(label=_("Destination Path"), help_text=_("Filename or directory to upload to."))
+  extract_archive = BooleanField(required=False)
 
 class UploadArchiveForm(forms.Form):
   op = "upload"
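
A minimal sketch of how a client could exercise the new checkbox, assuming a plain HTTP client (requests is used here for illustration only; host, port, and paths are illustrative, with field names taken from UploadFileForm above and the endpoint from the test further down):

    # Hypothetical client-side call exercising the new flag; dest, hdfs_file
    # and extract_archive map to the UploadFileForm fields above.
    import requests

    with open('test.zip', 'rb') as f:
        resp = requests.post(
            'http://localhost:8888/filebrowser/upload/file?dest=/tmp/fb-upload-test',
            data={'dest': '/tmp/fb-upload-test', 'extract_archive': True},
            files={'hdfs_file': f},  # the "hdfs" prefix routes to HDFSfileUploadHandler
        )
    print resp.json().get('batch_job_response')  # only present when extract_archive is set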

+ 7 - 3
apps/filebrowser/src/filebrowser/views.py

@@ -1226,6 +1226,7 @@ def _upload_file(request):
     if form.is_valid():
         uploaded_file = request.FILES['hdfs_file']
         dest = form.cleaned_data['dest']
+        extract_archive = form.cleaned_data.get('extract_archive')
         filepath = request.fs.join(dest, uploaded_file.name)
 
         if request.fs.isdir(dest) and posixpath.sep in uploaded_file.name:
@@ -1234,6 +1235,9 @@ def _upload_file(request):
         try:
             request.fs.upload(file=uploaded_file, path=dest, username=request.user.username)
             response['status'] = 0
+            if extract_archive:
+              response['batch_job_response'] = extract_archive_in_hdfs(request, dest, uploaded_file.name)
+
         except IOError, ex:
             already_exists = False
             try:
@@ -1348,9 +1352,9 @@ def _upload_archive(request):
     else:
         raise PopupException(_("Error in upload form: %s") % (form.errors,))
 
-def extract_archive_in_hdfs(fs, upload_path, file_name):
+def extract_archive_in_hdfs(request, upload_path, file_name):
 
-  _upload_extract_archive_script_to_hdfs(fs)
+  _upload_extract_archive_script_to_hdfs(request.fs)
 
   from notebook.connectors.base import Notebook
 
@@ -1360,7 +1364,7 @@ def extract_archive_in_hdfs(fs, upload_path, file_name):
                                    archives=[],
                                    files=[{'value': '/user/hue/common/extract_archive_in_hdfs.sh'}, {"value": upload_path + '/' + file_name}],
                                    env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}])
-  shell_notebook.execute(request, batch=True)
+  return shell_notebook.execute(request, batch=True)
 
 def _upload_extract_archive_script_to_hdfs(fs):
   if not fs.exists('/user/hue/common/'):
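
With the flag set, the upload response now embeds the batch submission result returned by shell_notebook.execute(); a sketch of the shape the caller sees, inferred from the assertions in views_test.py below (the handle id is illustrative):

    {
        'status': 0,                  # upload to HDFS succeeded
        'batch_job_response': {
            'status': 0,              # batch job submitted successfully
            'handle': {'id': '0000001-161028000000000-oozie-oozi-W'}  # illustrative id
        }
    }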

+ 26 - 0
apps/filebrowser/src/filebrowser/views_test.py

@@ -903,6 +903,32 @@ alert("XSS")
         # StopFutureHandlers() does not seem to work in test mode as it continues to MemoryFileUploadHandler after perm issue and so fails.
         pass
 
+  def test_extract_uploaded_archive(self):
+    prefix = self.cluster.fs_prefix + '/test_upload_zip'
+    self.cluster.fs.mkdir(prefix)
+
+    USER_NAME = 'test'
+    HDFS_DEST_DIR = prefix + "/tmp/fb-upload-test"
+    ZIP_FILE = os.path.realpath('apps/filebrowser/src/filebrowser/test_data/test.zip')
+    HDFS_ZIP_FILE = HDFS_DEST_DIR + '/test.zip'
+
+    self.cluster.fs.mkdir(HDFS_DEST_DIR)
+    self.cluster.fs.chown(HDFS_DEST_DIR, USER_NAME)
+    self.cluster.fs.chmod(HDFS_DEST_DIR, 0700)
+
+    # Upload archive
+    resp = self.c.post('/filebrowser/upload/file?dest=%s' % HDFS_DEST_DIR,
+                       dict(dest=HDFS_DEST_DIR, hdfs_file=file(ZIP_FILE), extract_archive=True))
+    response = json.loads(resp.content)
+    assert_equal(0, response['status'], response)
+    assert_true(self.cluster.fs.exists(HDFS_ZIP_FILE))
+
+    assert_true('batch_job_response' in response)
+    batch_job_response = response['batch_job_response']
+    assert_equal(0, batch_job_response['status'], batch_job_response)
+    assert_true('handle' in batch_job_response and batch_job_response['handle']['id'], batch_job_response)
+
+
 
   def test_upload_zip(self):
     prefix = self.cluster.fs_prefix + '/test_upload_zip'
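
The archive fixture at ZIP_FILE is assumed to already exist under test_data/; should it ever need regenerating, a minimal sketch (member name and contents are illustrative):

    import zipfile

    # Recreate the fixture used by both tests; run from the repo root.
    with zipfile.ZipFile('apps/filebrowser/src/filebrowser/test_data/test.zip', 'w') as zf:
        zf.writestr('test.txt', 'sample contents\n')  # illustrative archive member

The suite itself is normally driven through Hue's nose-based runner (build/env/bin/hue test).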