HUE-366. Update Beeswax Hive from 0.5 to 0.6.
Updating the bundled Hive jars from 0.5 to 0.6.
Resolving Hive 0.5 -> 0.6 API incompatibilities in BeeswaxServiceImpl.
Updating beeswax_server.sh, hive-default-xml.jar, and the tests to work with Hive 0.6.

vinithra committed 15 years ago · commit 41382f5f61
48 changed files with 38 additions and 43 deletions
  1. apps/beeswax/beeswax_server.sh (+1 -1)
  2. apps/beeswax/hive/lib/commons-cli-2.0-SNAPSHOT.jar (BIN)
  3. apps/beeswax/hive/lib/commons-codec-1.4.jar (BIN)
  4. apps/beeswax/hive/lib/commons-dbcp-1.4.jar (BIN)
  5. apps/beeswax/hive/lib/commons-pool-1.5.4.jar (BIN)
  6. apps/beeswax/hive/lib/datanucleus-connectionpool-2.0.1.jar (BIN)
  7. apps/beeswax/hive/lib/datanucleus-core-1.1.2-patched.jar (BIN)
  8. apps/beeswax/hive/lib/datanucleus-core-2.0.3.jar (BIN)
  9. apps/beeswax/hive/lib/datanucleus-enhancer-1.1.2.jar (BIN)
  10. apps/beeswax/hive/lib/datanucleus-enhancer-2.0.3.jar (BIN)
  11. apps/beeswax/hive/lib/datanucleus-rdbms-1.1.2.jar (BIN)
  12. apps/beeswax/hive/lib/datanucleus-rdbms-2.0.3.jar (BIN)
  13. apps/beeswax/hive/lib/hbase-0.20.3-test.jar (BIN)
  14. apps/beeswax/hive/lib/hbase-0.20.3.jar (BIN)
  15. apps/beeswax/hive/lib/hive-anttasks-0.6.0-CDH3-dev.jar (BIN)
  16. apps/beeswax/hive/lib/hive-cli-0.5.0.jar (BIN)
  17. apps/beeswax/hive/lib/hive-cli-0.6.0-CDH3-dev.jar (BIN)
  18. apps/beeswax/hive/lib/hive-common-0.5.0.jar (BIN)
  19. apps/beeswax/hive/lib/hive-common-0.6.0-CDH3-dev.jar (BIN)
  20. apps/beeswax/hive/lib/hive-default-xml-0.5.0.jar (BIN)
  21. apps/beeswax/hive/lib/hive-default-xml-0.6.0.jar (BIN)
  22. apps/beeswax/hive/lib/hive-exec-0.5.0.jar (BIN)
  23. apps/beeswax/hive/lib/hive-exec-0.6.0-CDH3-dev.jar (BIN)
  24. apps/beeswax/hive/lib/hive-hwi-0.5.0.jar (BIN)
  25. apps/beeswax/hive/lib/hive-hwi-0.6.0-CDH3-dev.jar (BIN)
  26. apps/beeswax/hive/lib/hive-hwi-0.6.0-CDH3-dev.war (BIN)
  27. apps/beeswax/hive/lib/hive-jdbc-0.5.0.jar (BIN)
  28. apps/beeswax/hive/lib/hive-jdbc-0.6.0-CDH3-dev.jar (BIN)
  29. apps/beeswax/hive/lib/hive-metastore-0.5.0.jar (BIN)
  30. apps/beeswax/hive/lib/hive-metastore-0.6.0-CDH3-dev.jar (BIN)
  31. apps/beeswax/hive/lib/hive-serde-0.5.0.jar (BIN)
  32. apps/beeswax/hive/lib/hive-serde-0.6.0-CDH3-dev.jar (BIN)
  33. apps/beeswax/hive/lib/hive-service-0.5.0.jar → apps/beeswax/hive/lib/hive-service-0.6.0-CDH3-dev.jar (BIN)
  34. apps/beeswax/hive/lib/hive-shims-0.5.0.jar (BIN)
  35. apps/beeswax/hive/lib/hive-shims-0.6.0-CDH3-dev.jar (BIN)
  36. apps/beeswax/hive/lib/hive_contrib.jar (BIN)
  37. apps/beeswax/hive/lib/hive_hbase-handler.jar (BIN)
  38. apps/beeswax/hive/lib/jdo2-api-2.3-SNAPSHOT.jar → apps/beeswax/hive/lib/jdo2-api-2.3-ec.jar (BIN)
  39. apps/beeswax/hive/lib/junit-4.5.jar (BIN)
  40. apps/beeswax/hive/lib/log4j-1.2.16.jar (BIN)
  41. apps/beeswax/hive/lib/slf4j-api-1.6.1.jar (BIN)
  42. apps/beeswax/hive/lib/slf4j-log4j12-1.6.1.jar (BIN)
  43. apps/beeswax/hive/lib/libthrift.jar → apps/beeswax/hive/lib/thrift-0.5.0.jar (BIN)
  44. apps/beeswax/hive/lib/libfb303.jar → apps/beeswax/hive/lib/thrift-fb303-0.5.0.jar (BIN)
  45. apps/beeswax/hive/lib/zookeeper-3.2.2.jar (BIN)
  46. apps/beeswax/java/src/com/cloudera/beeswax/BeeswaxServiceImpl.java (+18 -27)
  47. apps/beeswax/src/beeswax/test_base.py (+2 -2)
  48. apps/beeswax/src/beeswax/tests.py (+17 -13)

+ 1 - 1
apps/beeswax/beeswax_server.sh

@@ -56,7 +56,7 @@ fi
 if [ -f $HADOOP_CONF_DIR/hadoop-env.sh ]; then
   . $HADOOP_CONF_DIR/hadoop-env.sh
 fi
-export HADOOP_CONF_DIR=$BEESWAX_ROOT/../../desktop/conf:${BEESWAX_HIVE_LIB}/hive-default-xml-0.5.0.jar:${HADOOP_CONF_DIR}:$(find $BEESWAX_HIVE_LIB -name "libfb303.jar" | head -1)
+export HADOOP_CONF_DIR=$BEESWAX_ROOT/../../desktop/conf:${BEESWAX_HIVE_LIB}/hive-default-xml-0.6.0.jar:${HADOOP_CONF_DIR}:$(find $BEESWAX_HIVE_LIB -name "thrift-fb303-0.5.0.jar" | head -1)
 echo \$HADOOP_CONF_DIR=$HADOOP_CONF_DIR
 
 # Note: I've had trouble running this with just "java -jar" with the classpath


+ 18 - 27
apps/beeswax/java/src/com/cloudera/beeswax/BeeswaxServiceImpl.java

@@ -42,7 +42,6 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.UUID;
-import java.util.Vector;
 import javax.net.ssl.HttpsURLConnection;
 import javax.net.ssl.SSLContext;
 
@@ -56,10 +55,8 @@ import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.ExplainSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.plan.fetchWork;
-import org.apache.hadoop.hive.ql.plan.tableDesc;
+import org.apache.hadoop.hive.ql.plan.FetchWork;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
 import org.apache.hadoop.hive.ql.QueryPlan;
@@ -215,9 +212,9 @@ public class BeeswaxServiceImpl implements BeeswaxService.Iface {
         CommandProcessor p = CommandProcessorFactory.get(tokens[0]);
         int res;
         if (p instanceof Driver) {
-          res = p.run(cmd);
+          res = p.run(cmd).getResponseCode();
         } else {
-          res = p.run(cmd1);
+          res = p.run(cmd1).getResponseCode();
         }
         if (res != 0) {
           throwException(new RuntimeException(getErrorStreamAsString()));
@@ -323,7 +320,7 @@ public class BeeswaxServiceImpl implements BeeswaxService.Iface {
     }
 
     private void materializeResults(Results r, boolean startOver) throws IOException {
-      if (driver.getPlan().getPlan().getFetchTask() == null) {
+      if (driver.getPlan().getFetchTask() == null) {
         // This query is never going to return anything.
         r.has_more = false;
         r.setData(Collections.<String>emptyList());
@@ -333,11 +330,12 @@ public class BeeswaxServiceImpl implements BeeswaxService.Iface {
 
       if (startOver) {
         // This is totally inappropriately reaching into internals.
-        driver.getPlan().getPlan().getFetchTask().initialize(hiveConf,
-            driver.getPlan());
+        driver.getPlan().getFetchTask().initialize(hiveConf,
+            driver.getPlan(), null);
         startRow = 0;
       }
-      Vector<String> v = new Vector<String>();
+
+      ArrayList<String> v = new ArrayList<String>();
       r.setData(v);
       r.has_more = driver.getResults(v);
       r.start_row = startRow;
@@ -365,8 +363,8 @@ public class BeeswaxServiceImpl implements BeeswaxService.Iface {
         LOG.error("Error getting schema for query: " + query.query, ex);
       }
 
-      fetchWork work = getFetchWork();
-      tableDesc desc = work.getTblDesc();
+      FetchWork work = getFetchWork();
+      TableDesc desc = work.getTblDesc();
       String tabledir = null;
       String tablename = null;
       String sep = null;
@@ -383,19 +381,15 @@ public class BeeswaxServiceImpl implements BeeswaxService.Iface {
     }
 
     /**
-     * Get the fetchWork. Only SELECTs have them.
+     * Get the FetchWork. Only SELECTs have them.
      */
-    synchronized private fetchWork getFetchWork() {
+    synchronized private FetchWork getFetchWork() {
       QueryPlan plan = driver.getPlan();
       FetchTask fetchTask = null;
       if (plan != null) {
-        BaseSemanticAnalyzer sem = plan.getPlan();
-        if (sem.getFetchTask() != null) {
-          if (!sem.getFetchTaskInit()) {
-            sem.setFetchTaskInit(true);
-            sem.getFetchTask().initialize(hiveConf, plan);
-          }
-          fetchTask = (FetchTask) sem.getFetchTask();
+        fetchTask = plan.getFetchTask();
+        if (fetchTask != null) {
+          fetchTask.initialize(hiveConf, plan, null);
         }
       }
 
@@ -403,7 +397,7 @@ public class BeeswaxServiceImpl implements BeeswaxService.Iface {
         return null;
       }
 
-      fetchWork work = (fetchWork) fetchTask.getWork();
+      FetchWork work = fetchTask.getWork();
       return work;
     }
 
@@ -412,16 +406,13 @@ public class BeeswaxServiceImpl implements BeeswaxService.Iface {
       // By manipulating the query, this will make errors harder to find.
       query.query = "EXPLAIN " + query.query;
       checkedCompile();
-      if (!(driver.getPlan().getPlan() instanceof ExplainSemanticAnalyzer)) {
-        throwException(new RuntimeException("Expected explain plan."));
-      }
 
       int ret;
       if (0 != (ret = driver.execute())) {
         throwException(new RuntimeException("Failed to execute: EXPLAIN " + ret));
       }
       StringBuilder sb = new StringBuilder();
-      Vector<String> v = new Vector<String>();
+      ArrayList<String> v = new ArrayList<String>();
       try {
         while (driver.getResults(v)) {
           for (String s : v) {
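
The BeeswaxServiceImpl changes above boil down to four Hive 0.6 API moves: CommandProcessor.run() now returns a response object instead of an int, the FetchTask hangs directly off the QueryPlan (the old plan.getPlan() hop through BaseSemanticAnalyzer is gone), Task.initialize() takes a third argument, and Driver.getResults() is fed an ArrayList rather than a Vector. Below is a minimal sketch of the new calling pattern, not code from the commit: the class name, the two helper methods, the cmd1 reconstruction, and the println output are invented for illustration, while every Hive call mirrors one visible in the diff.

    import java.io.IOException;
    import java.util.ArrayList;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.QueryPlan;
    import org.apache.hadoop.hive.ql.exec.FetchTask;
    import org.apache.hadoop.hive.ql.plan.FetchWork;
    import org.apache.hadoop.hive.ql.plan.TableDesc;
    import org.apache.hadoop.hive.ql.processors.CommandProcessor;
    import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;

    public class Hive06ApiSketch {

      // Hive 0.6: run() returns a CommandProcessorResponse; the exit
      // status is unwrapped with getResponseCode() instead of being
      // the return value itself.
      public static int runStatement(String cmd) throws Exception {
        String[] tokens = cmd.trim().split("\\s+");
        // cmd1 (the statement minus its leading keyword) is reconstructed
        // here for the sketch; its actual definition sits outside the
        // hunk shown above.
        String cmd1 = cmd.trim().substring(tokens[0].length()).trim();
        CommandProcessor p = CommandProcessorFactory.get(tokens[0]);
        if (p instanceof Driver) {
          return p.run(cmd).getResponseCode();  // Driver takes the full text
        }
        return p.run(cmd1).getResponseCode();   // SET/ADD etc. take the args only
      }

      // Hive 0.6: fetch SELECT results through the QueryPlan's FetchTask.
      public static void dumpResults(HiveConf hiveConf, Driver driver) throws IOException {
        // The FetchTask now hangs directly off the QueryPlan; the old
        // plan.getPlan() indirection through BaseSemanticAnalyzer is gone.
        QueryPlan plan = driver.getPlan();
        FetchTask fetchTask = (plan == null) ? null : plan.getFetchTask();
        if (fetchTask == null) {
          return;  // only SELECTs carry a FetchTask
        }

        // initialize() grew a third parameter in 0.6; the diff passes null.
        fetchTask.initialize(hiveConf, plan, null);

        // getWork() is typed now, so the cast to the old lowercase fetchWork
        // is gone; TableDesc (formerly tableDesc) describes the result table.
        FetchWork work = fetchTask.getWork();
        TableDesc desc = work.getTblDesc();  // table dir/separator live here

        // getResults() takes an ArrayList rather than a Vector in 0.6.
        ArrayList<String> rows = new ArrayList<String>();
        while (driver.getResults(rows)) {
          for (String row : rows) {
            System.out.println(row);
          }
          rows.clear();  // getResults() appends; drain between batches
        }
      }
    }

The null third argument to initialize() simply mirrors what the service passes in the hunks above.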

+ 2 - 2
apps/beeswax/src/beeswax/test_base.py

@@ -168,7 +168,7 @@ def wait_for_query_to_finish(client, response, max=30.0):
 def make_query(client, query, submission_type="Execute",
                udfs=None, settings=None, resources=None,
                wait=False, name=None, desc=None, local=True,
-               is_parameterized=True, **kwargs):
+               is_parameterized=True, max=30.0, **kwargs):
   """
   Prepares arguments for the execute view.
 
@@ -221,7 +221,7 @@ def make_query(client, query, submission_type="Execute",
   response = client.post("/beeswax/execute", parameters, **kwargs)
 
   if wait:
-    return wait_for_query_to_finish(client, response)
+    return wait_for_query_to_finish(client, response, max)
   return response
 
 

+ 17 - 13
apps/beeswax/src/beeswax/tests.py

@@ -52,11 +52,11 @@ CSV_LINK_PAT = re.compile('/beeswax/download/\d+/csv')
 def _make_query(client, query, submission_type="Execute",
                 udfs=None, settings=None, resources=[],
                 wait=False, name=None, desc=None, local=True,
-                is_parameterized=True, **kwargs):
+                is_parameterized=True, max=30.0, **kwargs):
   """Wrapper around the real make_query"""
   res = make_query(client, query, submission_type,
                    udfs, settings, resources,
-                   wait, name, desc, local, is_parameterized, **kwargs)
+                   wait, name, desc, local, is_parameterized, max, **kwargs)
   # Should be in the history if it's submitted.
   if submission_type == 'Execute':
     fragment = collapse_whitespace(smart_str(query[:20]))
@@ -136,7 +136,7 @@ for x in sys.stdin:
     response = wait_for_query_to_finish(self.client, response, max=180.0)
     # Check that we actually got a compressed output
     files = self.cluster.fs.listdir("/user/hive/warehouse/test2")
-    assert_equal(1, len(files))
+    assert_true(len(files) >= 1)
     assert_true(files[0].endswith(".deflate"))
     # And check that the name is right...
     assert_true("test_query_with_setting" in [ x.profile.name for x in self.cluster.jt.all_jobs().jobs ])
@@ -529,10 +529,13 @@ for x in sys.stdin:
       # Check that data is right
       if verify:
         target_ls = self.cluster.fs.listdir(target_dir)
-        assert_equal(1, len(target_ls))
-        target_file = self.cluster.fs.open(target_dir + '/' + target_ls[0])
-        data_buf = target_file.read()
-        target_file.close()
+        assert_true(len(target_ls) >= 1)
+        data_buf = ""
+        for target in target_ls:
+          target_file = self.cluster.fs.open(target_dir + '/' + target)
+          data_buf += target_file.read()
+          target_file.close()
+
         assert_equal(256, len(data_buf.strip().split('\n')))
         assert_true('255' in data_buf)
       return resp
@@ -541,13 +544,13 @@ for x in sys.stdin:
 
     # Not supported. SELECT *. (Result dir is same as table dir.)
     hql = "SELECT * FROM test"
-    resp = _make_query(self.client, hql, wait=True)
+    resp = _make_query(self.client, hql, wait=True, local=False, max=180.0)
     resp = save_and_verify(resp, TARGET_DIR_ROOT + '/1', verify=False)
     assert_true('not supported' in resp.content)
 
     # SELECT columns. (Result dir is in /tmp.)
     hql = "SELECT foo, bar FROM test"
-    resp = _make_query(self.client, hql, wait=True)
+    resp = _make_query(self.client, hql, wait=True, local=False, max=180.0)
     resp = save_and_verify(resp, TARGET_DIR_ROOT + '/2')
     # Results has a link to the FB
     assert_true('Query results stored in' in resp.content)
@@ -555,7 +558,7 @@ for x in sys.stdin:
 
     # Not supported. Partition tables
     hql = "SELECT * FROM test_partitions"
-    resp = _make_query(self.client, hql, wait=True)
+    resp = _make_query(self.client, hql, wait=True, local=False, max=180.0)
     resp = save_and_verify(resp, TARGET_DIR_ROOT + '/3', verify=False)
     assert_true('not supported' in resp.content)
 
@@ -575,7 +578,8 @@ for x in sys.stdin:
       wait_for_query_to_finish(self.client, resp, max=120)
 
       # Check that data is right. The SELECT may not give us the whole table.
-      resp = _make_query(self.client, 'SELECT * FROM %s' % (target_tbl,), wait=True)
+      resp = _make_query(self.client, 'SELECT * FROM %s' % (target_tbl,), wait=True,
+                        local=False)
       for i in xrange(90):
         assert_equal([str(i), '0x%x' % (i,)], resp.context['results'][i])
 
@@ -583,12 +587,12 @@ for x in sys.stdin:
 
     # SELECT *. (Result dir is same as table dir.)
     hql = "SELECT * FROM test"
-    resp = _make_query(self.client, hql, wait=True)
+    resp = _make_query(self.client, hql, wait=True, local=False, max=180.0)
     save_and_verify(resp, TARGET_TBL_ROOT + '_1')
 
     # SELECT columns. (Result dir is in /tmp.)
     hql = "SELECT foo, bar FROM test"
-    resp = _make_query(self.client, hql, wait=True)
+    resp = _make_query(self.client, hql, wait=True, local=False, max=180.0)
     save_and_verify(resp, TARGET_TBL_ROOT + '_2')