
[core] Adding HADOOP_HOME for Hive tests

Romain Rigaux, 12 years ago
parent commit 9f51b6f

+ 5 - 1
apps/beeswax/src/beeswax/server/hive_server2_lib.py

@@ -304,7 +304,11 @@ class HiveServerClient:
 
 
   def open_session(self, user):
-    kwargs = {'username': user.username, 'configuration': {}}
+    kwargs = {
+        'username': user.username,
+        'configuration': {},
+        'client_protocol': TOpenSessionReq.thrift_spec[1][4], # Thrift default not automatic
+    }
 
     if self.use_sasl:
       kerberos_principal_short_name = KERBEROS.HUE_PRINCIPAL.get().split('/', 1)[0]
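
Why pass client_protocol explicitly? In Thrift-generated Python, a field's declared default is recorded in the class's thrift_spec tuple but is not filled in automatically when the struct is built from keyword arguments, so open_session() has to forward it by hand. Below is a hypothetical, trimmed-down sketch of that layout (the real TCLIService-generated tuple has the same shape, with field id 1 being client_protocol; the wire-type numbers and the default value 0 are assumptions for illustration):

# Hypothetical, simplified Thrift-generated struct; each field entry is
# (field id, wire type, field name, nested type info, declared default).
class TOpenSessionReq(object):
    thrift_spec = (
        None,                                # field id 0 is unused
        (1, 8, 'client_protocol', None, 0),  # I32 enum with a declared default
        (2, 11, 'username', None, None),     # STRING, no default
        (3, 11, 'password', None, None),     # STRING, no default
    )

# thrift_spec[1][4] reads the declared default of field 1 (client_protocol),
# which is exactly the value the kwargs above send to the server.
print(TOpenSessionReq.thrift_spec[1][4])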

+ 6 - 1
apps/beeswax/src/beeswax/test_base.py

@@ -53,7 +53,12 @@ def _start_server(cluster):
 
   env = cluster._mr2_env.copy()
 
+  hadoop_cp_proc = subprocess.Popen(args=[get_run_root('ext/hadoop/hadoop') + '/bin/hadoop', 'classpath'], env=env, cwd=cluster._tmpdir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  hadoop_cp_proc.wait()
+  hadoop_cp = hadoop_cp_proc.stdout.read().strip()
+
   env.update({
+    'HADOOP_HOME': get_run_root('ext/hadoop/hadoop'), # Used only by Hive for some reason
     'HIVE_CONF_DIR': beeswax.conf.HIVE_CONF_DIR.get(),
     'HIVE_SERVER2_THRIFT_PORT': str(HIVE_SERVER_TEST_PORT),
     'HADOOP_MAPRED_HOME': get_run_root('ext/hadoop/hadoop') + '/share/hadoop/mapreduce',
@@ -68,7 +73,7 @@ def _start_server(cluster):
        + ':' +
        get_run_root('ext/hadoop/hadoop') + '/share/hadoop/mapreduce/hadoop-mapreduce-client-core.jar'
        ,
-    'HADOOP_CLASSPATH': '',
+    'HADOOP_CLASSPATH': hadoop_cp,
   })
 
   if os.getenv("JAVA_HOME"):
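
One thing to watch in the new classpath capture: calling wait() before reading stdout can deadlock once `hadoop classpath` prints more than the OS pipe buffer holds, because the child blocks on a full pipe while the parent blocks in wait(). A minimal sketch of the safer pattern using communicate() (the helper name and arguments here are illustrative, not the commit's code):

import subprocess

def hadoop_classpath(hadoop_bin, env, cwd):
    # communicate() drains stdout/stderr while waiting for exit, so the
    # child can never wedge on a full pipe the way wait()-then-read() can.
    proc = subprocess.Popen([hadoop_bin, 'classpath'],
                            env=env, cwd=cwd,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if proc.returncode != 0:
        raise RuntimeError('hadoop classpath failed: %s' % err.strip())
    return out.strip()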

+ 0 - 2
apps/beeswax/src/beeswax/tests.py

@@ -520,8 +520,6 @@ for x in sys.stdin:
       assert_equal( [ i + 1, i + 2 ], answer)
 
   def test_data_export_limit_clause(self):
-    raise SkipTest
-
     limit = 3
     hql = 'SELECT foo FROM test limit %d' % (limit,)
     query = hql_query(hql)

+ 7 - 4
tools/jenkins/build-functions

@@ -63,7 +63,7 @@ build_hadoop() {
   rm -rf "$HADOOP_DIR/${CDH_VERSION}"
   echo "Unpacking $CDH_CACHE to $HADOOP_DIR"
   tar -C $HADOOP_DIR -xzf $CDH_CACHE
-  ln -s $HADOOP_DIR/${CDH_VERSION} $HADOOP_DIR/hadoop
+  ln -sf $HADOOP_DIR/${CDH_VERSION} $HADOOP_DIR/hadoop
   # For Hive
   ln -sf $HADOOP_DIR/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-core-*.jar $HADOOP_DIR/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-core.jar
   ln -sf $HADOOP_DIR/hadoop/share/hadoop/common/hadoop-common-*-SNAPSHOT.jar $HADOOP_DIR/hadoop/share/hadoop/common/hadoop-common.jar
@@ -95,8 +95,11 @@ build_hive() {
   rm -rf $HIVE_HOME
   echo "Unpacking $HIVE_CACHE to $HIVE_DIR"
   tar -C $HIVE_DIR -xzf $HIVE_CACHE
-  ln -s $HIVE_DIR/${HIVE_VERSION} $HIVE_DIR/hive
+  ln -sf $HIVE_DIR/${HIVE_VERSION} $HIVE_DIR/hive
   export HIVE_CONF_DIR=$HIVE_HOME/conf
+
+  # Weird HADOOP_HOME, creating a HADOOP_HIVE_HOME
+  #sed -i'.bk' "s|HADOOP=\$HADOOP_HOME/bin/hadoop|HADOOP=\$HADOOP_HIVE_HOME/bin/hadoop|g" $HIVE_HOME/bin/hive
 }
 
 ##########
@@ -123,7 +126,7 @@ build_oozie() {
   export OOZIE_CONF_DIR=$OOZIE_HOME/conf
 
   rm -rf $OOZIE_DIR/oozie
-  ln -s $OOZIE_DIR/${OOZIE_VERSION} $OOZIE_DIR/oozie
+  ln -sf $OOZIE_DIR/${OOZIE_VERSION} $OOZIE_DIR/oozie
 
   mkdir -p $OOZIE_HOME/libext
   tar -C $OOZIE_HOME/libext -zxvf $OOZIE_HOME/oozie-hadooplibs-*.tar.gz
@@ -161,7 +164,7 @@ build_sqoop() {
   export SQOOP_CONF_DIR=$SQOOP_HOME/server/conf
 
   rm -rf $SQOOP_DIR/sqoop
-  ln -s $SQOOP_DIR/${SQOOP_VERSION} $SQOOP_DIR/sqoop
+  ln -sf $SQOOP_DIR/${SQOOP_VERSION} $SQOOP_DIR/sqoop
 
   # Change ports and hostnames to be configurable or replaceable
   sed -i'.bk' 's/12000/${test.port.http}/g' $SQOOP_CONF_DIR/server.xml
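
The ln -s to ln -sf changes above all serve the same purpose: on a rerun of the Jenkins job the symlink already exists, and plain ln -s aborts with "File exists", so -f forces replacement and keeps the build functions idempotent. A hedged Python equivalent of that idiom (purely illustrative, not part of the commit):

import errno
import os

def force_symlink(target, link_name):
    # Mirror of `ln -sf`: replace an existing link instead of failing
    # with EEXIST when the build is run a second time.
    try:
        os.symlink(target, link_name)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
        os.remove(link_name)
        os.symlink(target, link_name)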