HUE-2880 [hadoop] Fix uploading large files to a kerberized HTTPFS

Hue is unable to upload large files to a kerberized cluster when it is
configured to talk to HDFS through an HTTPFS server. Here is what's going on:

* Hue's webhdfs client creates a session that preserves cookies.
* On Hue's first access to HTTPFS, its request is answered with a 401. Hue then
  obtains a Kerberos ticket and retries the request with it; HTTPFS accepts the
  request and also returns an authentication cookie in the response.
* When Hue uploads a file, it first makes an
  [empty request](https://github.com/cloudera/hue/blob/09bef779db147d016e0a2eda7682f9fea687c63a/desktop/libs/hadoop/src/hadoop/fs/webhdfs.py#L701)
  in order to be redirected to the location that will actually accept the data,
  passing along the cookies it received previously.
* Finally it uploads the file. This time, however, it creates a new session
  without the cookies. Normally the standard Kerberos handshake (a 401 followed
  by a retry) would kick in, but if the file is over a certain size, HTTPFS
  rejects the upload outright and closes the connection to protect against
  malicious users.
* Hue then displays an `Error: Undefined` popup because the connection error is
  not rendered properly in the UI.

The fix is simple: reuse the existing session when uploading the file so that the Kerberos cookies are preserved.
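
For illustration, here is a minimal sketch of the difference in terms of plain `requests` and `requests_kerberos`. The hostnames, paths, and auth setup are made up for the example and are not taken from Hue's code:

```python
import requests
from requests_kerberos import HTTPKerberosAuth, OPTIONAL

# Hypothetical HTTPFS endpoints standing in for the real cluster URLs.
STATUS_URL = 'https://httpfs.example.com:14000/webhdfs/v1/?op=GETFILESTATUS'
UPLOAD_URL = 'https://httpfs.example.com:14000/webhdfs/v1/tmp/big.bin?op=CREATE&data=true'

kerberos_auth = HTTPKerberosAuth(mutual_authentication=OPTIONAL)

# This session has already gone through the 401/Negotiate handshake, so it
# holds the signed authentication cookie that HTTPFS handed back.
session = requests.Session()
session.get(STATUS_URL, auth=kerberos_auth)

# Broken behaviour: a brand-new session sends the large body with no cookie,
# and HTTPFS closes the connection instead of offering the usual 401 retry.
fresh_session = requests.Session()
# fresh_session.put(UPLOAD_URL, data=open('big.bin', 'rb'), auth=kerberos_auth)

# Fixed behaviour: reuse the already-authenticated session so the cookie is
# sent along with the upload and no second handshake is needed.
session.put(UPLOAD_URL, data=open('big.bin', 'rb'), auth=kerberos_auth)
```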
Erick Tryzelaar, 10 years ago
parent commit bc2db4e2df

apps/filebrowser/src/filebrowser/views_test.py  +45 -40

@@ -20,6 +20,7 @@ import json
 import logging
 import os
 import re
+import tempfile
 import urlparse
 from avro import schema, datafile, io

@@ -795,57 +796,61 @@ class TestFileBrowserWithHadoop(object):


   def test_upload_file(self):
-    prefix = self.cluster.fs_prefix + '/test_upload_file'
-    self.cluster.fs.mkdir(prefix)
+    with tempfile.NamedTemporaryFile() as local_file:
+      local_file.write('01234' * 1024 * 1024)
+      local_file.flush()

-    USER_NAME = 'test'
-    HDFS_DEST_DIR = prefix + "/tmp/fb-upload-test"
-    LOCAL_FILE = __file__
-    HDFS_FILE = HDFS_DEST_DIR + '/' + os.path.basename(__file__)
+      prefix = self.cluster.fs_prefix + '/test_upload_file'
+      self.cluster.fs.mkdir(prefix)

-    self.cluster.fs.do_as_superuser(self.cluster.fs.mkdir, HDFS_DEST_DIR)
-    self.cluster.fs.do_as_superuser(self.cluster.fs.chown, HDFS_DEST_DIR, USER_NAME, USER_NAME)
-    self.cluster.fs.do_as_superuser(self.cluster.fs.chmod, HDFS_DEST_DIR, 0700)
+      USER_NAME = 'test'
+      HDFS_DEST_DIR = prefix + "/tmp/fb-upload-test"
+      LOCAL_FILE = local_file.name
+      HDFS_FILE = HDFS_DEST_DIR + '/' + os.path.basename(LOCAL_FILE)

-    stats = self.cluster.fs.stats(HDFS_DEST_DIR)
-    assert_equal(stats['user'], USER_NAME)
-    assert_equal(stats['group'], USER_NAME)
+      self.cluster.fs.do_as_superuser(self.cluster.fs.mkdir, HDFS_DEST_DIR)
+      self.cluster.fs.do_as_superuser(self.cluster.fs.chown, HDFS_DEST_DIR, USER_NAME, USER_NAME)
+      self.cluster.fs.do_as_superuser(self.cluster.fs.chmod, HDFS_DEST_DIR, 0700)

-    # Just upload the current python file
-    resp = self.c.post('/filebrowser/upload/file?dest=%s' % HDFS_DEST_DIR, # GET param avoids infinite looping
-                       dict(dest=HDFS_DEST_DIR, hdfs_file=file(LOCAL_FILE)))
-    response = json.loads(resp.content)
+      stats = self.cluster.fs.stats(HDFS_DEST_DIR)
+      assert_equal(stats['user'], USER_NAME)
+      assert_equal(stats['group'], USER_NAME)

-    assert_equal(0, response['status'], response)
-    stats = self.cluster.fs.stats(HDFS_FILE)
-    assert_equal(stats['user'], USER_NAME)
-    assert_equal(stats['group'], USER_NAME)
+      # Upload the 5 MB temporary file
+      resp = self.c.post('/filebrowser/upload/file?dest=%s' % HDFS_DEST_DIR, # GET param avoids infinite looping
+                         dict(dest=HDFS_DEST_DIR, hdfs_file=file(LOCAL_FILE)))
+      response = json.loads(resp.content)

-    f = self.cluster.fs.open(HDFS_FILE)
-    actual = f.read()
-    expected = file(LOCAL_FILE).read()
-    assert_equal(actual, expected)
+      assert_equal(0, response['status'], response)
+      stats = self.cluster.fs.stats(HDFS_FILE)
+      assert_equal(stats['user'], USER_NAME)
+      assert_equal(stats['group'], USER_NAME)

-    # Upload again and so fails because file already exits
-    resp = self.c.post('/filebrowser/upload/file?dest=%s' % HDFS_DEST_DIR,
-                       dict(dest=HDFS_DEST_DIR, hdfs_file=file(LOCAL_FILE)))
-    response = json.loads(resp.content)
-    assert_equal(-1, response['status'], response)
-    assert_true('already exists' in response['data'], response)
+      f = self.cluster.fs.open(HDFS_FILE)
+      actual = f.read(1024 * 1024 * 5)
+      expected = file(LOCAL_FILE).read()
+      assert_equal(actual, expected, 'files do not match: %s != %s' % (len(actual), len(expected)))

-    # Upload in / and fails because of missing permissions
-    not_me = make_logged_in_client("not_me", is_superuser=False)
-    grant_access("not_me", "not_me", "filebrowser")
-    try:
-      resp = not_me.post('/filebrowser/upload/file?dest=%s' % HDFS_DEST_DIR,
+      # Upload again and it fails because the file already exists
+      resp = self.c.post('/filebrowser/upload/file?dest=%s' % HDFS_DEST_DIR,
                          dict(dest=HDFS_DEST_DIR, hdfs_file=file(LOCAL_FILE)))
       response = json.loads(resp.content)
       assert_equal(-1, response['status'], response)
-      assert_true('Permission denied' in response['data'], response)
-    except AttributeError:
-      # Seems like a Django bug.
-      # StopFutureHandlers() does not seem to work in test mode as it continues to MemoryFileUploadHandler after perm issue and so fails.
-      pass
+      assert_true('already exists' in response['data'], response)
+
+      # Upload as a user without write permission and it fails
+      not_me = make_logged_in_client("not_me", is_superuser=False)
+      grant_access("not_me", "not_me", "filebrowser")
+      try:
+        resp = not_me.post('/filebrowser/upload/file?dest=%s' % HDFS_DEST_DIR,
+                           dict(dest=HDFS_DEST_DIR, hdfs_file=file(LOCAL_FILE)))
+        response = json.loads(resp.content)
+        assert_equal(-1, response['status'], response)
+        assert_true('Permission denied' in response['data'], response)
+      except AttributeError:
+        # Seems like a Django bug.
+        # StopFutureHandlers() does not seem to work in test mode as it continues to MemoryFileUploadHandler after perm issue and so fails.
+        pass


   def test_upload_zip(self):

desktop/libs/hadoop/src/hadoop/fs/webhdfs.py  +4 -0

@@ -709,6 +709,10 @@ class WebHdfs(Hdfs):

     # Now talk to the real thing. The redirect url already includes the params.
     client = self._make_client(next_url, self.security_enabled, self.ssl_cert_ca_verify)
+
+    # Make sure to reuse the session in order to preserve the Kerberos cookies.
+    client._session = self._client._session
+
     headers = {'Content-Type': 'application/octet-stream'}
     return resource.Resource(client).invoke(method, data=data, headers=headers)
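
For context, the method being patched implements the WebHDFS two-step write: an empty request that gets redirected, then the actual PUT of the data against the redirect target. A rough sketch of that flow with a plain `requests.Session`, using made-up helper and variable names rather than Hue's `Resource`/client classes:

```python
import requests

def two_step_put(session, create_url, local_path):
    # Step 1: send the CREATE request with no body and do not follow the
    # redirect; the Location header says where the data should actually go
    # (for HTTPFS it is the same server with data=true appended).
    first = session.put(create_url, allow_redirects=False)
    next_url = first.headers['Location']

    # Step 2: stream the file to the redirect target. Reusing the same
    # session means the Kerberos cookie obtained earlier is sent as well,
    # which is what the one-line fix above restores in Hue.
    with open(local_path, 'rb') as f:
        resp = session.put(next_url, data=f,
                           headers={'Content-Type': 'application/octet-stream'})
    resp.raise_for_status()
    return resp
```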