HUE-84. hadoop.fs.hadoopfs_test:test_i18n_namespace failing

bc Wong 15 years ago
parent
commit
5bfb268a83

+ 7 - 0
apps/filebrowser/src/filebrowser/forms.py

@@ -18,6 +18,7 @@
 from django import forms
 from django.forms import FileField, CharField, BooleanField, Textarea
 
+from desktop.lib import i18n
 from filebrowser.lib import rwx
 from hadoop.fs import normpath
 from django.contrib.auth.models import User, Group
@@ -39,6 +40,12 @@ class EditorForm(forms.Form):
   contents = CharField(widget=Textarea, label="Contents", required=False)
   encoding = CharField(label='Encoding', required=False)
 
+  def clean_encoding(self):
+    encoding = self.cleaned_data.get('encoding', '').strip()
+    if not encoding:
+      return i18n.get_site_encoding()
+    return encoding
+
 class RenameForm(forms.Form):
   op = "rename"
   src_path = CharField(label="File to rename", help_text="The file to rename.")
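
The new clean_encoding() uses Django's clean_<field>() hook, which runs after the field's own validation; since an optional field submitted blank still validates, the fallback to the site encoding has to happen here rather than at every call site. A minimal standalone sketch of the hook (EncodingForm is a hypothetical stand-in for EditorForm, and 'utf-8' stands in for i18n.get_site_encoding()):

from django.conf import settings
settings.configure()  # bare settings are enough to validate a form in a shell

from django import forms

class EncodingForm(forms.Form):
  encoding = forms.CharField(required=False)

  def clean_encoding(self):
    # Runs after CharField's own cleaning; a present-but-blank value is
    # still "valid", so the default is substituted here.
    encoding = self.cleaned_data.get('encoding', '').strip()
    return encoding or 'utf-8'

form = EncodingForm({'encoding': '   '})
assert form.is_valid()
assert form.cleaned_data['encoding'] == 'utf-8'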

+ 2 - 2
apps/filebrowser/src/filebrowser/views.py

@@ -137,7 +137,7 @@ def edit(request, path, form=None):
     raise PopupException("File too big to edit: %s" % (path,))
 
   if not form:
-    encoding = request.REQUEST.get('encoding', i18n.get_site_encoding())
+    encoding = request.REQUEST.get('encoding') or i18n.get_site_encoding()
     if stats:
       f = request.fs.open(path)
       try:
@@ -360,7 +360,7 @@ def display(request, path):
     raise PopupException("Not a file: '%s'" % (path,))
 
   stats = request.fs.stats(path)
-  encoding = request.GET.get('encoding', i18n.get_site_encoding())
+  encoding = request.GET.get('encoding') or i18n.get_site_encoding()
 
   # I'm mixing URL-based parameters and traditional
   # HTTP GET parameters, since URL-based parameters
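
The switch from a dict.get() default to an `or` fallback is the heart of the fix: the two-argument get() applies the default only when the key is absent, but a request submitted with a blank encoding field arrives as an empty string, which the old code passed straight through to the codec lookup. For instance:

params = {'encoding': ''}          # field present but left blank
params.get('encoding', 'utf-8')    # -> ''      : default covers only a missing key
params.get('encoding') or 'utf-8'  # -> 'utf-8' : also covers blank values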

+ 9 - 9
apps/filebrowser/src/filebrowser/views_test.py

@@ -174,10 +174,10 @@ def test_view_i18n():
     assert_equal("http://testserver/filebrowser/view/user/test", response["location"])
   finally:
     try:
-      cluster.fs.rmtree('/user/test')
       cluster.fs.rmtree('/test-filebrowser/')
-    except:
-      pass      # Don't let cleanup errors mask earlier failures
+      cluster.fs.rmtree('/user/test')
+    except Exception, ex:
+      LOG.error('Failed to cleanup test directory: %s' % (ex,))
     cluster.shutdown()
 
 
@@ -204,8 +204,8 @@ def view_helper(cluster, encoding, content):
   finally:
     try:
       cluster.fs.remove(filename)
-    except:
-      pass
+    except Exception, ex:
+      LOG.error('Failed to cleanup %s: %s' % (filename, ex))
 
 
 @attr('requires_hadoop')
@@ -238,8 +238,8 @@ def test_edit_i18n():
   finally:
     try:
       cluster.fs.rmtree('/test-filebrowser/')
-    except:
-      pass      # Don't let cleanup errors mask earlier failures
+    except Exception, ex:
+      LOG.error('Failed to remove tree /test-filebrowser: %s' % (ex,))
     cluster.shutdown()
 
 
@@ -293,5 +293,5 @@ def edit_helper(cluster, encoding, contents_pass_1, contents_pass_2):
   finally:
     try:
       cluster.fs.remove(filename)
-    except:
-      pass
+    except Exception, ex:
+      LOG.error('Failed to remove %s: %s' % (filename, ex))
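
All four cleanup blocks get the same treatment: narrowing bare except: to except Exception and logging instead of pass. An exception raised by the test body still propagates (the handler only wraps the cleanup call), but cleanup failures are no longer invisible, and the narrower clause no longer traps KeyboardInterrupt or SystemExit, which in Python 2.5+ derive from BaseException rather than Exception. The pattern, as a hypothetical helper (not part of the patch):

def cleanup_tree(fs, path):
  try:
    fs.rmtree(path)
  except Exception, ex:
    # Log rather than `pass`: the original test failure, if any, still
    # propagates, but cleanup problems now leave a trace.
    LOG.error('Failed to cleanup %s: %s' % (path, ex))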

+ 30 - 1
desktop/core/src/desktop/lib/i18n.py

@@ -20,11 +20,16 @@ Library methods to deal with non-ascii data
 """
 
 import codecs
-import desktop.conf
 import logging
+import os
+import re
+
+import desktop.conf
 
 SITE_ENCODING = None
 REPLACEMENT_CHAR = u'\ufffd'
+DEFAULT_LANG = 'en_US.UTF-8'
+
 
 def get_site_encoding():
   """Get the default site encoding"""
@@ -47,3 +52,27 @@ def validate_encoding(encoding):
     return True
   except LookupError:
     return False
+
+
+_CACHED_ENV = None
+
+def make_utf8_env():
+  """
+  Communication with child processes is in utf8. Make a utf8 environment.
+  """
+  global _CACHED_ENV
+  if not _CACHED_ENV:
+    # LANG is of the form <language>[.<encoding>[@<modifier>]]
+    # We want to replace the "encoding" part with UTF-8
+    lang_re = re.compile('\.([^@]*)')
+
+    env = os.environ.copy()
+    lang = env.get('LANG', DEFAULT_LANG)
+    if lang_re.search(lang):
+      lang = lang_re.sub('.UTF-8', lang)
+    else:
+      lang = DEFAULT_LANG
+
+    env['LANG'] = lang
+    _CACHED_ENV = env
+  return _CACHED_ENV
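
The regex rewrites only the encoding portion of LANG, leaving the language and any @modifier intact; a LANG with no encoding part (such as C or POSIX) fails the search() and the whole value falls back to DEFAULT_LANG. A quick sketch of the substitution, using the same pattern as make_utf8_env():

import re
lang_re = re.compile(r'\.([^@]*)')

print lang_re.sub('.UTF-8', 'en_US.ISO8859-1')        # en_US.UTF-8
print lang_re.sub('.UTF-8', 'de_DE.ISO8859-15@euro')  # de_DE.UTF-8@euro (modifier kept)
print lang_re.search('C')                             # None -> use DEFAULT_LANG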

+ 2 - 2
desktop/core/src/desktop/middleware.py

@@ -62,7 +62,7 @@ class ExceptionMiddleware(object):
   """
   def process_exception(self, request, exception):
     import traceback
-    logging.info("Processing exception: %s: %s" % (str(exception), traceback.format_exc()))
+    logging.info("Processing exception: %s: %s" % (exception, traceback.format_exc()))
 
     if hasattr(exception, "response"):
       return exception.response(request)
@@ -79,7 +79,7 @@ class ExceptionMiddleware(object):
     # need to do some kind of nicer handling than the built-in page
     # Note that exception may actually be an Http404 or similar.
     if request.ajax:
-      err = "An error occurred: " + str(exception)
+      err = "An error occurred: %s" % (exception,)
       logging.exception("Middleware caught an exception")
       return PopupException(err, detail=None).response(request)
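
Dropping str() matters under Python 2: str() on an exception whose message is non-ascii unicode forces an ascii encode and raises UnicodeEncodeError, while %-formatting with %s notices the unicode result and promotes the whole expression to unicode. A sketch of the difference (I18nError is a hypothetical stand-in for an exception carrying a non-ascii message):

class I18nError(Exception):
  def __str__(self):
    return u'fel: \u6d4b\u8bd5'   # non-ascii unicode message

e = I18nError()
msg = "An error occurred: %s" % (e,)   # -> u'An error occurred: fel: \u6d4b\u8bd5'
# "An error occurred: " + str(e)       # UnicodeEncodeError: 'ascii' codec can't encode...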
 

+ 5 - 2
desktop/libs/hadoop/src/hadoop/fs/hadoopfs.py

@@ -32,7 +32,7 @@ from thrift.transport import TSocket
 from thrift.protocol import TBinaryProtocol
 
 from django.utils.encoding import smart_str, force_unicode
-from desktop.lib import thrift_util
+from desktop.lib import thrift_util, i18n
 from hadoop.api.hdfs import Namenode, Datanode
 from hadoop.api.hdfs.constants import QUOTA_DONT_SET, QUOTA_RESET
 from hadoop.api.common.ttypes import RequestContext, IOException
@@ -119,6 +119,8 @@ class HadoopFileSystem(object):
       try:
         return function(*args, **kwargs)
       except IOException, e:
+        e.msg = force_unicode(e.msg, errors='replace')
+        e.stack = force_unicode(e.stack, errors='replace')
         LOG.exception("Exception in Hadoop FS call " + function.__name__)
         if e.clazz == HADOOP_ACCESSCONTROLEXCEPTION:
           raise PermissionDeniedException(e.msg, e)
@@ -676,7 +678,8 @@ class FileUpload(object):
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
-           close_fds=True,
+                                   close_fds=True,
+                                   env=i18n.make_utf8_env(),
                                    bufsize=WRITE_BUFFER_SIZE)
   @require_open
   def write(self, data):
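
Thrift delivers e.msg and e.stack as raw byte strings; when an HDFS path contains non-ascii characters, later %-formatting of the message would raise a UnicodeDecodeError of its own and mask the real error. force_unicode(..., errors='replace') decodes the bytes up front, substituting REPLACEMENT_CHAR for anything undecodable, while make_utf8_env() ensures the upload child process exchanges utf-8 with Hue. A dependency-free sketch of the decoding behavior force_unicode provides (Python 2 semantics):

raw = 'No such file: /user/\xe6\xb5\x8b'              # utf-8 bytes off the wire
# u'error: %s' % raw                                  # UnicodeDecodeError before the fix
msg = raw.decode('utf-8', 'replace')                  # u'No such file: /user/\u6d4b'
bad = 'broken \xff byte'.decode('utf-8', 'replace')   # u'broken \ufffd byte'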

+ 14 - 5
desktop/libs/hadoop/src/hadoop/fs/hadoopfs_test.py

@@ -20,6 +20,7 @@ Tests for Hadoop FS.
 """
 from nose.tools import assert_false, assert_true, assert_equals, assert_raises
 from nose.plugins.attrib import attr
+import logging
 import posixfile
 import random
 
@@ -27,6 +28,8 @@ from hadoop import mini_cluster
 from hadoop.fs.exceptions import PermissionDeniedException
 from hadoop.fs.hadoopfs import HadoopFileSystem
 
+LOG = logging.getLogger(__name__)
+
 @attr('requires_hadoop')
 def test_hadoopfs():
   """
@@ -308,11 +311,11 @@ def test_i18n_namespace():
   try:
     # Create a directory
     cluster.fs.mkdir(dir_path)
-    # Create a file (same name) in the directory
-    cluster.fs.open(file_path, 'w').close()
-
     # Directory is there
     check_existence(name, prefix)
+
+    # Create a file (same name) in the directory
+    cluster.fs.open(file_path, 'w').close()
     # File is there
     check_existence(name, dir_path)
 
@@ -329,9 +332,15 @@ def test_i18n_namespace():
     # Test rmtree
     cluster.fs.rmtree(dir_path)
     check_existence(name, prefix, present=False)
+
+    # Test exception can handle non-ascii characters
+    try:
+      cluster.fs.rmtree(dir_path)
+    except IOError, ex:
+      LOG.info('Successfully caught error: %s' % (ex,))
   finally:
     try:
       cluster.fs.rmtree(prefix)
-    except:
-      pass
+    except Exception, ex:
+      LOG.error('Failed to cleanup %s: %s' % (prefix, ex))
     cluster.shutdown()

+ 1 - 1
tools/hudson/build-functions

@@ -27,7 +27,7 @@ if [ -z "$CDH_URL" ] ; then
       CDH_URL="http://archive.cloudera.com/cdh/2/hadoop-0.20.1+169.68.tar.gz"
       ;;
     3)
-      CDH_URL="http://archive.cloudera.com/cdh/3/hadoop-0.20.2+228.tar.gz"
+      CDH_URL="http://archive.cloudera.com/cdh/3/hadoop-0.20.2+320.tar.gz"
       ;;
   esac
 fi