
HUE-438. Making beeswax dependent on hive install.

Removing hive dir from beeswax.

HUE-438. Adding Hive, thrift, fb303 maven dependencies required for Beeswax build.

HUE-438. Updating beeswax tests to use locally installed Hive.

HUE-438. Updating to hive-0.7
vinithra 14 years ago
parent
commit
7a1fc5cbf3
66 changed files with 98 additions and 556 deletions
  1. apps/beeswax/beeswax_server.sh (+16, -5)
  2. apps/beeswax/conf/hue-beeswax.ini (+4, -0)
  3. apps/beeswax/hive/VERSION_DATA (+0, -41)
  4. apps/beeswax/hive/lib/ant-contrib-1.0b3.jar (binary)
  5. apps/beeswax/hive/lib/antlr-runtime-3.0.1.jar (binary)
  6. apps/beeswax/hive/lib/asm-3.1.jar (binary)
  7. apps/beeswax/hive/lib/commons-cli-1.2.jar (binary)
  8. apps/beeswax/hive/lib/commons-codec-1.3.jar (binary)
  9. apps/beeswax/hive/lib/commons-collections-3.2.1.jar (binary)
  10. apps/beeswax/hive/lib/commons-dbcp-1.4.jar (binary)
  11. apps/beeswax/hive/lib/commons-lang-2.4.jar (binary)
  12. apps/beeswax/hive/lib/commons-logging-1.0.4.jar (binary)
  13. apps/beeswax/hive/lib/commons-logging-api-1.0.4.jar (binary)
  14. apps/beeswax/hive/lib/commons-pool-1.5.4.jar (binary)
  15. apps/beeswax/hive/lib/datanucleus-connectionpool-2.0.3.jar (binary)
  16. apps/beeswax/hive/lib/datanucleus-core-2.0.3.jar (binary)
  17. apps/beeswax/hive/lib/datanucleus-enhancer-2.0.3.jar (binary)
  18. apps/beeswax/hive/lib/datanucleus-rdbms-2.0.3.jar (binary)
  19. apps/beeswax/hive/lib/derby.jar (binary)
  20. apps/beeswax/hive/lib/guava-r06.jar (binary)
  21. apps/beeswax/hive/lib/hbase-0.89.0-SNAPSHOT-tests.jar (binary)
  22. apps/beeswax/hive/lib/hbase-0.89.0-SNAPSHOT.jar (binary)
  23. apps/beeswax/hive/lib/hive-anttasks-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  24. apps/beeswax/hive/lib/hive-cli-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  25. apps/beeswax/hive/lib/hive-common-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  26. apps/beeswax/hive/lib/hive-contrib-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  27. apps/beeswax/hive/lib/hive-default-xml-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  28. apps/beeswax/hive/lib/hive-exec-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  29. apps/beeswax/hive/lib/hive-hbase-handler-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  30. apps/beeswax/hive/lib/hive-hwi-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  31. apps/beeswax/hive/lib/hive-hwi-0.7.0-CDH3B4-SNAPSHOT.war (binary)
  32. apps/beeswax/hive/lib/hive-jdbc-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  33. apps/beeswax/hive/lib/hive-metastore-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  34. apps/beeswax/hive/lib/hive-serde-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  35. apps/beeswax/hive/lib/hive-service-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  36. apps/beeswax/hive/lib/hive-shims-0.7.0-CDH3B4-SNAPSHOT.jar (binary)
  37. apps/beeswax/hive/lib/jdo2-api-2.3-ec.jar (binary)
  38. apps/beeswax/hive/lib/jline-0.9.94.jar (binary)
  39. apps/beeswax/hive/lib/json.jar (binary)
  40. apps/beeswax/hive/lib/junit-3.8.1.jar (binary)
  41. apps/beeswax/hive/lib/libfb303.jar (binary)
  42. apps/beeswax/hive/lib/libthrift.jar (binary)
  43. apps/beeswax/hive/lib/log4j-1.2.15.jar (binary)
  44. apps/beeswax/hive/lib/log4j-1.2.16.jar (binary)
  45. apps/beeswax/hive/lib/slf4j-api-1.6.1.jar (binary)
  46. apps/beeswax/hive/lib/slf4j-log4j12-1.6.1.jar (binary)
  47. apps/beeswax/hive/lib/stringtemplate-3.1b1.jar (binary)
  48. apps/beeswax/hive/lib/thrift-0.5.0.jar (binary)
  49. apps/beeswax/hive/lib/thrift-fb303-0.5.0.jar (binary)
  50. apps/beeswax/hive/lib/velocity-1.5.jar (binary)
  51. apps/beeswax/hive/lib/zookeeper-3.3.1.jar (binary)
  52. apps/beeswax/hive/scripts/metastore/upgrade/derby/README (+0, -18)
  53. apps/beeswax/hive/scripts/metastore/upgrade/derby/upgrade-0.6.0.derby.sql (+0, -27)
  54. apps/beeswax/hive/scripts/metastore/upgrade/derby/upgrade-0.7.0.derby.sql (+0, -235)
  55. apps/beeswax/hive/scripts/metastore/upgrade/mysql/README (+0, -11)
  56. apps/beeswax/hive/scripts/metastore/upgrade/mysql/upgrade-0.6.0.mysql.sql (+0, -27)
  57. apps/beeswax/hive/scripts/metastore/upgrade/mysql/upgrade-0.7.0.mysql.sql (+0, -160)
  58. apps/beeswax/hive/scripts/metastore/upgrade/postgres/upgrade-0.6.0.postgres.sql (+0, -27)
  59. apps/beeswax/java/pom.xml (+12, -1)
  60. apps/beeswax/src/beeswax/conf.py (+8, -1)
  61. apps/beeswax/src/beeswax/management/commands/beeswax_server.py (+2, -0)
  62. apps/beeswax/src/beeswax/test_base.py (+13, -1)
  63. desktop/conf.dist/log4j.properties (+7, -0)
  64. maven/pom.xml (+12, -1)
  65. tools/hudson/build-functions (+23, -1)
  66. tools/hudson/hudson.sh (+1, -0)

+ 16 - 5
apps/beeswax/beeswax_server.sh

@@ -24,19 +24,29 @@ if [ -z "$HADOOP_HOME" ]; then
   exit 1
 fi
 
+echo \$HADOOP_HOME=$HADOOP_HOME
+
 if [ -z "$HIVE_CONF_DIR" ]; then
   echo "\$HIVE_CONF_DIR must be specified" 1>&2
   exit 1
 fi
 
+echo \$HIVE_CONF_DIR=$HIVE_CONF_DIR
+
+if [ -z "$HIVE_HOME" ]; then
+  echo "\$HIVE_HOME not specified. Defaulting to $HIVE_CONF_DIR/.." 1>&2
+  export HIVE_HOME=$HIVE_CONF_DIR/..
+  exit 1
+fi
+
+echo \$HIVE_HOME=$HIVE_HOME
+
 
 BEESWAX_ROOT=$(dirname $0)
 BEESWAX_JAR=$BEESWAX_ROOT/java-lib/BeeswaxServer.jar
-BEESWAX_HIVE_LIB=$BEESWAX_ROOT/hive/lib
+HIVE_LIB=$HIVE_HOME/lib
 
-echo \$HADOOP_HOME=$HADOOP_HOME
-
-export HADOOP_CLASSPATH=$(find $BEESWAX_HIVE_LIB -name "*.jar" | tr "\n" :)
+export HADOOP_CLASSPATH=$(find $HIVE_LIB -name "*.jar" | tr "\n" :)
 
 if [ -n "$HADOOP_EXTRA_CLASSPATH_STRING" ]; then
   export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$HADOOP_EXTRA_CLASSPATH_STRING
@@ -56,7 +66,8 @@ fi
 if [ -f $HADOOP_CONF_DIR/hadoop-env.sh ]; then
   . $HADOOP_CONF_DIR/hadoop-env.sh
 fi
-export HADOOP_CONF_DIR=$HIVE_CONF_DIR:${BEESWAX_HIVE_LIB}/hive-default-xml-0.7.0-CDH3B4-SNAPSHOT.jar:${HADOOP_CONF_DIR}:$(find $BEESWAX_HIVE_LIB -name "thrift-fb303-0.5.0.jar" | head -1)
+
+export HADOOP_CONF_DIR=$HIVE_CONF_DIR:$BEESWAX_ROOT/../../desktop/conf:$HADOOP_CONF_DIR
 echo \$HADOOP_CONF_DIR=$HADOOP_CONF_DIR
 
 # Note: I've had trouble running this with just "java -jar" with the classpath
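With the bundled jars gone, beeswax_server.sh now expects a locally installed Hive and assembles HADOOP_CLASSPATH from the jars under $HIVE_HOME/lib. A minimal launch sketch follows; the Hadoop path is illustrative, while /usr/lib/hive and /etc/hive/conf match the defaults introduced in hue-beeswax.ini and conf.py below.

  # Environment the script checks before starting beeswaxd (illustrative paths)
  export HADOOP_HOME=/usr/lib/hadoop     # required; the script exits if unset
  export HIVE_CONF_DIR=/etc/hive/conf    # required; where hive-site.xml lives
  export HIVE_HOME=/usr/lib/hive         # root of the local Hive install; jars are taken from $HIVE_HOME/lib
  apps/beeswax/beeswax_server.sh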

+ 4 - 0
apps/beeswax/conf/hue-beeswax.ini

@@ -11,6 +11,10 @@
 # Configure the port the beeswax thrift server runs on
 ## beeswax_server_port=8002
 
+#
+# Hive home directory
+## hive_home_dir=/usr/lib/hive
+
 #
 # Hive configuration directory, where hive-site.xml is located
 ## hive_conf_dir=/etc/hive/conf

+ 0 - 41
apps/beeswax/hive/VERSION_DATA

@@ -1,41 +0,0 @@
-Hive 0.5.0 JARs (with internal patches).
-From git.sf.cloudera.com:hive.git, commit a1db1b1cd430ca6852136dbc03fb13a5070cfc26
-
-commit a1db1b1cd430ca6852136dbc03fb13a5070cfc26
-Author: Philip Zeyliger <philip@cloudera.com>
-Date:   Mon Mar 22 14:21:07 2010 -0700
-
-    CLOUDERA-BUILD.  HIVE-1157. Allowing "add jar" to access remote resources.
-    
-    Fixing checkstyle issues.
-    
-    [CLOUDERA-BUILD is overwritten here; this is Desktop-specific
-    for now.]
-
-commit fbd36c3dc9254803e306d9532ed4324cdbc26dc7
-Author: bcwalrus <bcwalrus@cloudera.com>
-Date:   Sat Mar 20 00:08:16 2010 -0700
-
-    HIVE-1261. ql.metadata.Hive#close() should check for null metaStoreClient
-    
-    Reason: This fix is necessary to implement the Beeswax server to reuse threads for concurrent queries.
-    Author: bc Wong
-    Ref: CDH-834
-
-commit 28496b90881a5c4ada092a2936f830e452ccacf5
-Author: bcwalrus <bcwalrus@cloudera.com>
-Date:   Thu Mar 4 14:43:36 2010 -0800
-
-    Applied HIVE-1211.1.patch, to allow capturing logs from child processes.
-
-commit 76395bc3f9cb36f798205f50d378a70b6a3e86bc
-Author: Philip Zeyliger <philip@cloudera.com>
-Date:   Tue Feb 2 16:10:36 2010 -0800
-
-    Renaming patched datanucleus to have different name from original datanucleus.
-
-commit 6ea2ec6209d6d3f9ca134d55814cfdd9b8821827
-Author: Todd Lipcon <todd@cloudera.com>
-Date:   Wed Aug 26 16:05:16 2009 -0700
-
-    Hot patch to datanucleus jar to fix building inside dirs with '+'

BIN  apps/beeswax/hive/lib/ant-contrib-1.0b3.jar
BIN  apps/beeswax/hive/lib/antlr-runtime-3.0.1.jar
BIN  apps/beeswax/hive/lib/asm-3.1.jar
BIN  apps/beeswax/hive/lib/commons-cli-1.2.jar
BIN  apps/beeswax/hive/lib/commons-codec-1.3.jar
BIN  apps/beeswax/hive/lib/commons-collections-3.2.1.jar
BIN  apps/beeswax/hive/lib/commons-dbcp-1.4.jar
BIN  apps/beeswax/hive/lib/commons-lang-2.4.jar
BIN  apps/beeswax/hive/lib/commons-logging-1.0.4.jar
BIN  apps/beeswax/hive/lib/commons-logging-api-1.0.4.jar
BIN  apps/beeswax/hive/lib/commons-pool-1.5.4.jar
BIN  apps/beeswax/hive/lib/datanucleus-connectionpool-2.0.3.jar
BIN  apps/beeswax/hive/lib/datanucleus-core-2.0.3.jar
BIN  apps/beeswax/hive/lib/datanucleus-enhancer-2.0.3.jar
BIN  apps/beeswax/hive/lib/datanucleus-rdbms-2.0.3.jar
BIN  apps/beeswax/hive/lib/derby.jar
BIN  apps/beeswax/hive/lib/guava-r06.jar
BIN  apps/beeswax/hive/lib/hbase-0.89.0-SNAPSHOT-tests.jar
BIN  apps/beeswax/hive/lib/hbase-0.89.0-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-anttasks-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-cli-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-common-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-contrib-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-default-xml-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-exec-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-hbase-handler-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-hwi-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-hwi-0.7.0-CDH3B4-SNAPSHOT.war
BIN  apps/beeswax/hive/lib/hive-jdbc-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-metastore-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-serde-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-service-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/hive-shims-0.7.0-CDH3B4-SNAPSHOT.jar
BIN  apps/beeswax/hive/lib/jdo2-api-2.3-ec.jar
BIN  apps/beeswax/hive/lib/jline-0.9.94.jar
BIN  apps/beeswax/hive/lib/json.jar
BIN  apps/beeswax/hive/lib/junit-3.8.1.jar
BIN  apps/beeswax/hive/lib/libfb303.jar
BIN  apps/beeswax/hive/lib/libthrift.jar
BIN  apps/beeswax/hive/lib/log4j-1.2.15.jar
BIN  apps/beeswax/hive/lib/log4j-1.2.16.jar
BIN  apps/beeswax/hive/lib/slf4j-api-1.6.1.jar
BIN  apps/beeswax/hive/lib/slf4j-log4j12-1.6.1.jar
BIN  apps/beeswax/hive/lib/stringtemplate-3.1b1.jar
BIN  apps/beeswax/hive/lib/thrift-0.5.0.jar
BIN  apps/beeswax/hive/lib/thrift-fb303-0.5.0.jar
BIN  apps/beeswax/hive/lib/velocity-1.5.jar
BIN  apps/beeswax/hive/lib/zookeeper-3.3.1.jar


+ 0 - 18
apps/beeswax/hive/scripts/metastore/upgrade/derby/README

@@ -1,18 +0,0 @@
-
-1) Shutdown your metastore instance.
-
-2) Perform a backup of your Derby metastore database. Probably
-   the easiest way of doing this is to just create a copy of the
-   Derby database "metastore_db" directory.
-
-3) Execute the Hive 0.6 and Hive 0.7 upgrade scripts:
-   % ij
-   ij version 10.4
-   ij> CONNECT 'jdbc:derby:/Users/bob/hive/metastore_db;databaseName=metastore_db';
-   ij> RUN 'upgrade-0.6.0.derby.sql';
-   ij> RUN 'upgrade-0.7.0.derby.sql';
-   ij> quit;
-
-NOTE: You may need to install the Derby 'ij' utility.
-      Look here for installation instructions:
-      http://db.apache.org/derby/docs/10.4/getstart/

+ 0 - 27
apps/beeswax/hive/scripts/metastore/upgrade/derby/upgrade-0.6.0.derby.sql

@@ -1,27 +0,0 @@
--- HIVE-972: Support views
-ALTER TABLE "TBLS" ADD "VIEW_ORIGINAL_TEXT" LONG VARCHAR DEFAULT NULL;
-ALTER TABLE "TBLS" ADD "VIEW_EXPANDED_TEXT" LONG VARCHAR DEFAULT NULL;
-
--- HIVE-1068: CREATE VIEW followup: add a 'table type' enum
---            attribute in metastore
-ALTER TABLE "TBLS" ADD COLUMN "TBL_TYPE" VARCHAR(128);
-
--- HIVE-675: Add database/schema support for Hive QL
-ALTER TABLE "DBS" ALTER "DESC" SET DATA TYPE VARCHAR(4000);
-ALTER TABLE "DBS" ADD "DB_LOCATION_URI" VARCHAR(4000) NOT NULL DEFAULT '';
-
--- HIVE-1364: Increase the maximum length of various metastore fields,
---            and remove TYPE_NAME from COLUMNS primary key
-ALTER TABLE "TBLS" ALTER "OWNER" SET DATA TYPE VARCHAR(767);
-ALTER TABLE "COLUMNS" ALTER "TYPE_NAME" SET DATA TYPE VARCHAR(4000);
-ALTER TABLE "PARTITION_KEYS" ALTER "PKEY_COMMENT" SET DATA TYPE VARCHAR(4000);
-ALTER TABLE "SD_PARAMS" ALTER "PARAM_VALUE" SET DATA TYPE VARCHAR(4000);
-ALTER TABLE "SDS" ALTER "INPUT_FORMAT" SET DATA TYPE VARCHAR(4000);
-ALTER TABLE "SDS" ALTER "LOCATION" SET DATA TYPE VARCHAR(4000);
-ALTER TABLE "SDS" ALTER "OUTPUT_FORMAT" SET DATA TYPE VARCHAR(4000);
-ALTER TABLE "SERDE_PARAMS" ALTER "PARAM_VALUE" SET DATA TYPE VARCHAR(4000);
-ALTER TABLE "SERDES" ALTER "SLIB" SET DATA TYPE VARCHAR(4000);
-ALTER TABLE "TABLE_PARAMS" ALTER "PARAM_VALUE" SET DATA TYPE VARCHAR(4000);
-ALTER TABLE "COLUMNS" DROP PRIMARY KEY;
-ALTER TABLE "COLUMNS" ADD PRIMARY KEY ("SD_ID", "COLUMN_NAME");
-ALTER TABLE "PARTITION_PARAMS" ALTER "PARAM_VALUE" SET DATA TYPE VARCHAR(4000);

+ 0 - 235
apps/beeswax/hive/scripts/metastore/upgrade/derby/upgrade-0.7.0.derby.sql

@@ -1,235 +0,0 @@
---
--- HIVE-417 Implement Indexing in Hive
---
-
-CREATE TABLE "IDXS" (
-  "INDEX_ID" BIGINT NOT NULL,
-  "CREATE_TIME" INTEGER NOT NULL,
-  "DEFERRED_REBUILD" CHAR(1) NOT NULL,
-  "INDEX_HANDLER_CLASS" VARCHAR(256),
-  "INDEX_NAME" VARCHAR(128),
-  "INDEX_TBL_ID" BIGINT,
-  "LAST_ACCESS_TIME" INTEGER NOT NULL,
-  "ORIG_TBL_ID" BIGINT,
-  "SD_ID" BIGINT);
-
-ALTER TABLE "IDXS" ADD CONSTRAINT "IDXS_FK1"
-  FOREIGN KEY ("SD_ID") REFERENCES "SDS" ("SD_ID")
-  ON DELETE NO ACTION ON UPDATE NO ACTION;
-
-ALTER TABLE "IDXS" ADD CONSTRAINT "IDXS_FK2"
-  FOREIGN KEY ("INDEX_TBL_ID") REFERENCES "TBLS" ("TBL_ID")
-  ON DELETE NO ACTION ON UPDATE NO ACTION;
-
-ALTER TABLE "IDXS" ADD CONSTRAINT "IDXS_FK3"
-  FOREIGN KEY ("ORIG_TBL_ID") REFERENCES "TBLS" ("TBL_ID")
-  ON DELETE NO ACTION ON UPDATE NO ACTION;
-
-ALTER TABLE "IDXS" ADD CONSTRAINT "IDXS_PK"
-  PRIMARY KEY ("INDEX_ID");
-
-ALTER TABLE "IDXS" ADD CONSTRAINT "DEFERRED_REBUILD_CHECK"
-  CHECK (DEFERRED_REBUILD IN ('Y','N'));
-
-
-CREATE TABLE "INDEX_PARAMS" (
-  "INDEX_ID" BIGINT NOT NULL,
-  "PARAM_KEY" VARCHAR(256) NOT NULL,
-  "PARAM_VALUE" VARCHAR(767));
-
-ALTER TABLE "INDEX_PARAMS" ADD CONSTRAINT "INDEX_PARAMS_FK1"
-  FOREIGN KEY ("INDEX_ID") REFERENCES "IDXS" ("INDEX_ID")
-  ON DELETE NO ACTION ON UPDATE NO ACTION;
-
-ALTER TABLE "INDEX_PARAMS" ADD CONSTRAINT "INDEX_PARAMS_PK"
-  PRIMARY KEY ("INDEX_ID", "PARAM_KEY");
-
-CREATE UNIQUE INDEX "UNIQUEINDEX" ON "IDXS" ("INDEX_NAME", "ORIG_TBL_ID");
-
-
---
--- HIVE-1823 Upgrade the database thrift interface to allow parameters key-value pairs
---
-CREATE TABLE "DATABASE_PARAMS" (
-  "DB_ID" BIGINT NOT NULL,
-  "PARAM_KEY" VARCHAR(180) NOT NULL,
-  "PARAM_VALUE" VARCHAR(4000));
-
-ALTER TABLE "DATABASE_PARAMS" ADD CONSTRAINT "DATABASE_PARAMS_FK1"
-  FOREIGN KEY ("DB_ID") REFERENCES "DBS" ("DB_ID")
-  ON DELETE NO ACTION ON UPDATE NO ACTION;
-
-ALTER TABLE "DATABASE_PARAMS" ADD CONSTRAINT "DATABASE_PARAMS_PK"
-  PRIMARY KEY ("DB_ID", "PARAM_KEY");
-
-ALTER TABLE "DBS" DROP COLUMN "PARAMETERS";
-
---
--- HIVE-78 Authorization model for Hive
---
-
-CREATE TABLE "DB_PRIVS" (
-  "DB_GRANT_ID" BIGINT NOT NULL,
-  "CREATE_TIME" INTEGER NOT NULL,
-  "DB_ID" BIGINT,
-  "GRANT_OPTION" SMALLINT NOT NULL,
-  "GRANTOR" VARCHAR(128),
-  "GRANTOR_TYPE" VARCHAR(128),
-  "PRINCIPAL_NAME" VARCHAR(128),
-  "PRINCIPAL_TYPE" VARCHAR(128),
-  "DB_PRIV" VARCHAR(128));
-
-ALTER TABLE "DB_PRIVS" ADD CONSTRAINT "DB_PRIVS_FK1"
-  FOREIGN KEY ("DB_ID") REFERENCES "DBS" ("DB_ID")
-  ON DELETE NO ACTION ON UPDATE NO ACTION;
-
-ALTER TABLE "DB_PRIVS" ADD CONSTRAINT "DB_PRIVS_PK"
-  PRIMARY KEY ("DB_GRANT_ID");
-
-CREATE UNIQUE INDEX "DBPRIVILEGEINDEX" ON "DB_PRIVS" (
-  "DB_ID", "PRINCIPAL_NAME", "PRINCIPAL_TYPE",
-  "DB_PRIV", "GRANTOR", "GRANTOR_TYPE");
-
-
-CREATE TABLE "PART_COL_PRIVS" (
-  "PART_COLUMN_GRANT_ID" BIGINT NOT NULL,
-  "COLUMN_NAME" VARCHAR(128),
-  "CREATE_TIME" INTEGER NOT NULL,
-  "GRANT_OPTION" SMALLINT NOT NULL,
-  "GRANTOR" VARCHAR(128),
-  "GRANTOR_TYPE" VARCHAR(128),
-  "PART_ID" BIGINT,
-  "PRINCIPAL_NAME" VARCHAR(128),
-  "PRINCIPAL_TYPE" VARCHAR(128),
-  "PART_COL_PRIV" VARCHAR(128));
-
-ALTER TABLE "PART_COL_PRIVS" ADD CONSTRAINT "PART_COL_PRIVS_FK1"
-  FOREIGN KEY ("PART_ID") REFERENCES "PARTITIONS" ("PART_ID")
-  ON DELETE NO ACTION ON UPDATE NO ACTION;
-
-ALTER TABLE "PART_COL_PRIVS" ADD CONSTRAINT "PART_COL_PRIVS_PK"
-  PRIMARY KEY ("PART_COLUMN_GRANT_ID");
-
-CREATE INDEX "PARTITIONCOLUMNPRIVILEGEINDEX" ON "PART_COL_PRIVS" (
-  "PART_ID", "COLUMN_NAME", "PRINCIPAL_NAME", "PRINCIPAL_TYPE",
-  "PART_COL_PRIV", "GRANTOR", "GRANTOR_TYPE");
-  
-CREATE TABLE "PART_PRIVS" (
-  "PART_GRANT_ID" BIGINT NOT NULL,
-  "CREATE_TIME" INTEGER NOT NULL,
-  "GRANT_OPTION" SMALLINT NOT NULL,
-  "GRANTOR" VARCHAR(128),
-  "GRANTOR_TYPE" VARCHAR(128),
-  "PART_ID" BIGINT,
-  "PRINCIPAL_NAME" VARCHAR(128),
-  "PRINCIPAL_TYPE" VARCHAR(128),
-  "PART_PRIV" VARCHAR(128));
-
-ALTER TABLE "PART_PRIVS" ADD CONSTRAINT "PART_PRIVS_FK1"
-  FOREIGN KEY ("PART_ID") REFERENCES "PARTITIONS" ("PART_ID")
-  ON DELETE NO ACTION ON UPDATE NO ACTION;
-
-ALTER TABLE "PART_PRIVS" ADD CONSTRAINT "PART_PRIVS_PK"
-  PRIMARY KEY ("PART_GRANT_ID");
-
-CREATE INDEX "PARTPRIVILEGEINDEX" ON "PART_PRIVS" (
-  "PART_ID", "PRINCIPAL_NAME", "PRINCIPAL_TYPE",
-  "PART_PRIV", "GRANTOR", "GRANTOR_TYPE");
-
-
-CREATE TABLE "ROLES" (
-  "ROLE_ID" BIGINT NOT NULL,
-  "CREATE_TIME" INTEGER NOT NULL,
-  "OWNER_NAME" VARCHAR(128),
-  "ROLE_NAME" VARCHAR(128));
-  
-ALTER TABLE "ROLES" ADD CONSTRAINT "ROLES_PK"
-  PRIMARY KEY ("ROLE_ID");
-
-CREATE UNIQUE INDEX "ROLEENTITYINDEX" ON "ROLES" ("ROLE_NAME");
-
-
-CREATE TABLE "ROLE_MAP" (
-  "ROLE_GRANT_ID" BIGINT NOT NULL,
-  "ADD_TIME" INTEGER NOT NULL,
-  "GRANT_OPTION" SMALLINT NOT NULL,
-  "GRANTOR" VARCHAR(128),
-  "GRANTOR_TYPE" VARCHAR(128),
-  "PRINCIPAL_NAME" VARCHAR(128),
-  "PRINCIPAL_TYPE" VARCHAR(128),
-  "ROLE_ID" BIGINT);  
-
-ALTER TABLE "ROLE_MAP" ADD CONSTRAINT "ROLE_MAP_FK1"
-  FOREIGN KEY ("ROLE_ID") REFERENCES "ROLES" ("ROLE_ID")
-  ON DELETE NO ACTION ON UPDATE NO ACTION;
-
-ALTER TABLE "ROLE_MAP" ADD CONSTRAINT "ROLE_MAP_PK"
-  PRIMARY KEY ("ROLE_GRANT_ID");
-
-CREATE UNIQUE INDEX "USERROLEMAPINDEX" ON "ROLE_MAP" (
-  "PRINCIPAL_NAME", "ROLE_ID", "GRANTOR", "GRANTOR_TYPE");
-
-
-CREATE TABLE "TBL_COL_PRIVS" (
-  "TBL_COLUMN_GRANT_ID" BIGINT NOT NULL,
-  "COLUMN_NAME" VARCHAR(128),
-  "CREATE_TIME" INTEGER NOT NULL,
-  "GRANT_OPTION" SMALLINT NOT NULL,
-  "GRANTOR" VARCHAR(128),
-  "GRANTOR_TYPE" VARCHAR(128),
-  "PRINCIPAL_NAME" VARCHAR(128),
-  "PRINCIPAL_TYPE" VARCHAR(128),
-  "TBL_COL_PRIV" VARCHAR(128),
-  "TBL_ID" BIGINT);
-
-ALTER TABLE "TBL_COL_PRIVS" ADD CONSTRAINT "TBL_COL_PRIVS_FK1"
-  FOREIGN KEY ("TBL_ID") REFERENCES "TBLS" ("TBL_ID")
-  ON DELETE NO ACTION ON UPDATE NO ACTION;
-
-ALTER TABLE "TBL_COL_PRIVS" ADD CONSTRAINT "TBL_COL_PRIVS_PK"
-  PRIMARY KEY ("TBL_COLUMN_GRANT_ID");
-
-CREATE INDEX "TABLECOLUMNPRIVILEGEINDEX" ON "TBL_COL_PRIVS" (
-  "TBL_ID", "COLUMN_NAME", "PRINCIPAL_NAME", "PRINCIPAL_TYPE",
-  "TBL_COL_PRIV", "GRANTOR", "GRANTOR_TYPE");
-
-  
-CREATE TABLE "TBL_PRIVS" (
-  "TBL_GRANT_ID" BIGINT NOT NULL,
-  "CREATE_TIME" INTEGER NOT NULL,
-  "GRANT_OPTION" SMALLINT NOT NULL,
-  "GRANTOR" VARCHAR(128),
-  "GRANTOR_TYPE" VARCHAR(128),
-  "PRINCIPAL_NAME" VARCHAR(128),
-  "PRINCIPAL_TYPE" VARCHAR(128),
-  "TBL_PRIV" VARCHAR(128),
-  "TBL_ID" BIGINT);
-
-ALTER TABLE "TBL_PRIVS" ADD CONSTRAINT "TBL_PRIVS_FK1"
-  FOREIGN KEY ("TBL_ID") REFERENCES "TBLS" ("TBL_ID")
-  ON DELETE NO ACTION ON UPDATE NO ACTION;
-
-ALTER TABLE "TBL_PRIVS" ADD CONSTRAINT "TBL_PRIVS_PK"
-  PRIMARY KEY ("TBL_GRANT_ID");
-
-CREATE INDEX "TABLEPRIVILEGEINDEX" ON "TBL_PRIVS" (
-  "TBL_ID", "PRINCIPAL_NAME", "PRINCIPAL_TYPE",
-  "TBL_PRIV", "GRANTOR", "GRANTOR_TYPE");
-
-  
-CREATE TABLE "GLOBAL_PRIVS" (
-  "USER_GRANT_ID" BIGINT NOT NULL,
-  "CREATE_TIME" INTEGER NOT NULL,
-  "GRANT_OPTION" SMALLINT NOT NULL,
-  "GRANTOR" VARCHAR(128),
-  "GRANTOR_TYPE" VARCHAR(128),
-  "PRINCIPAL_NAME" VARCHAR(128),
-  "PRINCIPAL_TYPE" VARCHAR(128),
-  "USER_PRIV" VARCHAR(128));
-
-ALTER TABLE "GLOBAL_PRIVS" ADD CONSTRAINT "GLOBAL_PRIVS_PK"
-  PRIMARY KEY ("USER_GRANT_ID");
-
-CREATE UNIQUE INDEX "GLOBALPRIVILEGEINDEX" ON "GLOBAL_PRIVS" (
-  "PRINCIPAL_NAME", "PRINCIPAL_TYPE", "USER_PRIV",
-  "GRANTOR", "GRANTOR_TYPE");

+ 0 - 11
apps/beeswax/hive/scripts/metastore/upgrade/mysql/README

@@ -1,11 +0,0 @@
-
-1) Shutdown your metastore instance.
-
-2) Perform a backup of your MySQL metastore database:
-   % mysqldump --opt <metastore_db_name> > metastore_backup.sql
-
-3) Execute the Hive 0.6 and Hive 0.7 upgrade scripts:
-   % mysql --user=<username> --password=<password> \
-     <metastore_db_name> < upgrade-0.6.0.mysql.sql
-   % mysql --user=<username> --password=<password> \
-     <metastore_db_name> < upgrade-0.7.0.mysql.sql

+ 0 - 27
apps/beeswax/hive/scripts/metastore/upgrade/mysql/upgrade-0.6.0.mysql.sql

@@ -1,27 +0,0 @@
--- HIVE-972: Support views
-ALTER TABLE `TBLS` ADD `VIEW_EXPANDED_TEXT` mediumtext;
-ALTER TABLE `TBLS` ADD `VIEW_ORIGINAL_TEXT` mediumtext;
-
--- HIVE-1068: CREATE VIEW followup: add a 'table type' enum
---            attribute in metastore
-ALTER TABLE `TBLS` ADD `TBL_TYPE` VARCHAR(128);
-
--- HIVE-675: Add database/schema support for Hive QL
-ALTER TABLE `DBS` MODIFY `DESC` VARCHAR(4000);
-ALTER TABLE `DBS` ADD `DB_LOCATION_URI` VARCHAR(4000) NOT NULL DEFAULT '';
-
--- HIVE-1364: Increase the maximum length of various metastore fields,
---            and remove TYPE_NAME from COLUMNS primary key
-ALTER TABLE `TBLS` MODIFY `OWNER` VARCHAR(767);
-ALTER TABLE `COLUMNS` MODIFY `TYPE_NAME` VARCHAR(4000);
-ALTER TABLE `PARTITION_KEYS` MODIFY `PKEY_COMMENT` VARCHAR(4000);
-ALTER TABLE `SD_PARAMS` MODIFY `PARAM_VALUE` VARCHAR(4000);
-ALTER TABLE `SDS` MODIFY `INPUT_FORMAT` VARCHAR(4000);
-ALTER TABLE `SDS` MODIFY `LOCATION` VARCHAR(4000);
-ALTER TABLE `SDS` MODIFY `OUTPUT_FORMAT` VARCHAR(4000);
-ALTER TABLE `SERDE_PARAMS` MODIFY `PARAM_VALUE` VARCHAR(4000);
-ALTER TABLE `SERDES` MODIFY `SLIB` VARCHAR(4000);
-ALTER TABLE `TABLE_PARAMS` MODIFY `PARAM_VALUE` VARCHAR(4000);
-ALTER TABLE `COLUMNS` DROP PRIMARY KEY;
-ALTER TABLE `COLUMNS` ADD PRIMARY KEY (`SD_ID`, `COLUMN_NAME`);
-ALTER TABLE `PARTITION_PARAMS` MODIFY `PARAM_VALUE` VARCHAR(4000);

+ 0 - 160
apps/beeswax/hive/scripts/metastore/upgrade/mysql/upgrade-0.7.0.mysql.sql

@@ -1,160 +0,0 @@
---
--- HIVE-417 Implement Indexing in Hive
---
-CREATE TABLE IF NOT EXISTS `IDXS` (
-  `INDEX_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `DEFERRED_REBUILD` bit(1) NOT NULL,
-  `INDEX_HANDLER_CLASS` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `INDEX_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `INDEX_TBL_ID` bigint(20) DEFAULT NULL,
-  `LAST_ACCESS_TIME` int(11) NOT NULL,
-  `ORIG_TBL_ID` bigint(20) DEFAULT NULL,
-  `SD_ID` bigint(20) DEFAULT NULL,
-  PRIMARY KEY (`INDEX_ID`),
-  UNIQUE KEY `UNIQUEINDEX` (`INDEX_NAME`,`ORIG_TBL_ID`),
-  KEY `IDXS_FK1` (`SD_ID`),
-  KEY `IDXS_FK2` (`INDEX_TBL_ID`),
-  KEY `IDXS_FK3` (`ORIG_TBL_ID`),
-  CONSTRAINT `IDXS_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`),
-  CONSTRAINT `IDXS_FK2` FOREIGN KEY (`INDEX_TBL_ID`) REFERENCES `TBLS` (`TBL_ID`),
-  CONSTRAINT `IDXS_FK3` FOREIGN KEY (`ORIG_TBL_ID`) REFERENCES `TBLS` (`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
-CREATE TABLE IF NOT EXISTS `INDEX_PARAMS` (
-  `INDEX_ID` bigint(20) NOT NULL,
-  `PARAM_KEY` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `PARAM_VALUE` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`INDEX_ID`,`PARAM_KEY`),
-  CONSTRAINT `INDEX_PARAMS_FK1` FOREIGN KEY (`INDEX_ID`) REFERENCES `IDXS` (`INDEX_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
-
---
--- HIVE-1823 Upgrade the database thrift interface to allow parameters key-value pairs
---
-CREATE TABLE IF NOT EXISTS `DATABASE_PARAMS` (
-  `DB_ID` bigint(20) NOT NULL,
-  `PARAM_KEY` varchar(180) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
-  `PARAM_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`DB_ID`,`PARAM_KEY`),
-  CONSTRAINT `DATABASE_PARAMS_FK1` FOREIGN KEY (`DB_ID`) REFERENCES `DBS` (`DB_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
-ALTER TABLE `DBS` DROP COLUMN `PARAMETERS`;
-
---
--- HIVE-78 Authorization model for Hive
---
-CREATE TABLE IF NOT EXISTS `DB_PRIVS` (
-  `DB_GRANT_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `DB_ID` bigint(20) DEFAULT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `DB_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`DB_GRANT_ID`),
-  UNIQUE KEY `DBPRIVILEGEINDEX` (`DB_ID`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`DB_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
-  CONSTRAINT `DB_PRIVS_FK1` FOREIGN KEY (`DB_ID`) REFERENCES `DBS` (`DB_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
-CREATE TABLE IF NOT EXISTS `PART_COL_PRIVS` (
-  `PART_COLUMN_GRANT_ID` bigint(20) NOT NULL,
-  `COLUMN_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PART_ID` bigint(20) DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PART_COL_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`PART_COLUMN_GRANT_ID`),
-  KEY `PARTITIONCOLUMNPRIVILEGEINDEX` (`PART_ID`,`COLUMN_NAME`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`PART_COL_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
-  CONSTRAINT `PART_COL_PRIVS_FK1` FOREIGN KEY (`PART_ID`) REFERENCES `PARTITIONS` (`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
-CREATE TABLE IF NOT EXISTS `PART_PRIVS` (
-  `PART_GRANT_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PART_ID` bigint(20) DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PART_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`PART_GRANT_ID`),
-  KEY `PARTPRIVILEGEINDEX` (`PART_ID`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`PART_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
-  CONSTRAINT `PART_PRIVS_FK1` FOREIGN KEY (`PART_ID`) REFERENCES `PARTITIONS` (`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
-CREATE TABLE IF NOT EXISTS `ROLES` (
-  `ROLE_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `OWNER_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `ROLE_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`ROLE_ID`),
-  UNIQUE KEY `ROLEENTITYINDEX` (`ROLE_NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
-CREATE TABLE IF NOT EXISTS `ROLE_MAP` (
-  `ROLE_GRANT_ID` bigint(20) NOT NULL,
-  `ADD_TIME` int(11) NOT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `ROLE_ID` bigint(20) DEFAULT NULL,
-  PRIMARY KEY (`ROLE_GRANT_ID`),
-  UNIQUE KEY `USERROLEMAPINDEX` (`PRINCIPAL_NAME`,`ROLE_ID`,`GRANTOR`,`GRANTOR_TYPE`),
-  CONSTRAINT `ROLE_MAP_FK1` FOREIGN KEY (`ROLE_ID`) REFERENCES `ROLES` (`ROLE_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
-CREATE TABLE IF NOT EXISTS `TBL_COL_PRIVS` (
-  `TBL_COLUMN_GRANT_ID` bigint(20) NOT NULL,
-  `COLUMN_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `TBL_COL_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `TBL_ID` bigint(20) DEFAULT NULL,
-  PRIMARY KEY (`TBL_COLUMN_GRANT_ID`),
-  KEY `TABLECOLUMNPRIVILEGEINDEX` (`TBL_ID`,`COLUMN_NAME`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`TBL_COL_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
-  CONSTRAINT `TBL_COL_PRIVS_FK1` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS` (`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
-CREATE TABLE IF NOT EXISTS `TBL_PRIVS` (
-  `TBL_GRANT_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `TBL_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `TBL_ID` bigint(20) DEFAULT NULL,
-  PRIMARY KEY (`TBL_GRANT_ID`),
-  KEY `TABLEPRIVILEGEINDEX` (`TBL_ID`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`TBL_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
-  CONSTRAINT `TBL_PRIVS_FK1` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS` (`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-
-CREATE TABLE IF NOT EXISTS `GLOBAL_PRIVS` (
-  `USER_GRANT_ID` bigint(20) NOT NULL,
-  `CREATE_TIME` int(11) NOT NULL,
-  `GRANT_OPTION` smallint(6) NOT NULL,
-  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  `USER_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
-  PRIMARY KEY (`USER_GRANT_ID`),
-  UNIQUE KEY `GLOBALPRIVILEGEINDEX` (`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`USER_PRIV`,`GRANTOR`,`GRANTOR_TYPE`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;

+ 0 - 27
apps/beeswax/hive/scripts/metastore/upgrade/postgres/upgrade-0.6.0.postgres.sql

@@ -1,27 +0,0 @@
--- HIVE-972: Support views
-ALTER TABLE "TBLS" ADD COLUMN "VIEW_EXPANDED_TEXT" text;
-ALTER TABLE "TBLS" ADD COLUMN "VIEW_ORIGINAL_TEXT" text;
-
--- HIVE-1068: CREATE VIEW followup: add a 'table type' enum
---            attribute in metastore
-ALTER TABLE "TBLS" ADD COLUMN "TBL_TYPE" character varying(128);
-
--- HIVE-675: Add database/schema support for Hive QL
-ALTER TABLE "DBS" ALTER "DESC" TYPE character varying(4000);
-ALTER TABLE "DBS" ADD COLUMN "DB_LOCATION_URI" character varying(4000) NOT NULL DEFAULT ''::character varying;
-
--- HIVE-1364: Increase the maximum length of various metastore fields,
---            and remove TYPE_NAME from COLUMNS primary key
-ALTER TABLE "TBLS" ALTER "OWNER" TYPE character varying(767);
-ALTER TABLE "COLUMNS" ALTER "TYPE_NAME" TYPE character varying(4000);
-ALTER TABLE "PARTITION_KEYS" ALTER "PKEY_COMMENT" TYPE character varying(4000);
-ALTER TABLE "SD_PARAMS" ALTER "PARAM_VALUE" TYPE character varying(4000);
-ALTER TABLE "SDS" ALTER "INPUT_FORMAT" TYPE character varying(4000);
-ALTER TABLE "SDS" ALTER "LOCATION" TYPE character varying(4000);
-ALTER TABLE "SDS" ALTER "OUTPUT_FORMAT" TYPE character varying(4000);
-ALTER TABLE "SERDE_PARAMS" ALTER "PARAM_VALUE" TYPE character varying(4000);
-ALTER TABLE "SERDES" ALTER "SLIB" TYPE character varying(4000);
-ALTER TABLE "TABLE_PARAMS" ALTER "PARAM_VALUE" TYPE character varying(4000);
-ALTER TABLE "COLUMNS" DROP CONSTRAINT "COLUMNS_pkey";
-ALTER TABLE "COLUMNS" ADD CONSTRAINT "COLUMNS_pkey" PRIMARY KEY ("SD_ID", "COLUMN_NAME");
-ALTER TABLE "PARTITION_PARAMS" ALTER "PARAM_VALUE" TYPE character varying(4000);

+ 12 - 1
apps/beeswax/java/pom.xml

@@ -51,6 +51,18 @@
       <groupId>org.apache.hadoop.hive</groupId>
       <artifactId>hive-exec</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop.hive</groupId>
+      <artifactId>hive-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libthrift</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libfb303</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
@@ -123,4 +135,3 @@
     </plugins>
   </build>
 </project>
-      

+ 8 - 1
apps/beeswax/src/beeswax/conf.py

@@ -56,10 +56,17 @@ BEESWAX_SERVER_HEAPSIZE = Config(
     "may override this setting.",
   default="1000")
 
+BEESWAX_HIVE_HOME_DIR = Config(
+  key="hive_home_dir",
+  default=os.environ.get("HIVE_HOME", "/usr/lib/hive"),
+  help=("Path to the root of the Hive installation; " +
+        "defaults to environment variable when not set.")
+)
+
 BEESWAX_HIVE_CONF_DIR = Config(
   key='hive_conf_dir',
   help='Hive configuration directory, where hive-site.xml is located',
-  default='/etc/hive/conf')
+  default=os.environ.get("HIVE_CONF_DIR", '/etc/hive/conf'))
 
 LOCAL_EXAMPLES_DATA_DIR = Config(
   key='local_examples_data_dir',

+ 2 - 0
apps/beeswax/src/beeswax/management/commands/beeswax_server.py

@@ -35,6 +35,8 @@ class Command(NoArgsCommand):
     env['HADOOP_HOME'] = hadoop.conf.HADOOP_HOME.get()
     if hadoop.conf.HADOOP_CONF_DIR.get():
       env['HADOOP_CONF_DIR'] = hadoop.conf.HADOOP_CONF_DIR.get()
+    if beeswax.conf.BEESWAX_HIVE_HOME_DIR.get():
+      env['HIVE_HOME'] = beeswax.conf.BEESWAX_HIVE_HOME_DIR.get()
     if beeswax.conf.BEESWAX_HIVE_CONF_DIR.get():
       env['HIVE_CONF_DIR'] = beeswax.conf.BEESWAX_HIVE_CONF_DIR.get()
     if beeswax.conf.BEESWAX_SERVER_HEAPSIZE.get():
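Together with the conf.py change above, the management command now exports HIVE_HOME into the child environment, so a locally installed Hive can be picked up without editing the ini. A hedged sketch of starting the server this way; the hue launcher path is assumed from a standard Hue build and may differ.

  # Start the Beeswax server via the Django management command (assumed layout)
  export HIVE_HOME=/usr/local/hive-0.7.0   # illustrative; hive_home_dir otherwise defaults to /usr/lib/hive
  export HIVE_CONF_DIR=$HIVE_HOME/conf
  ./build/env/bin/hue beeswax_server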

+ 13 - 1
apps/beeswax/src/beeswax/test_base.py

@@ -69,6 +69,7 @@ def _start_server(cluster):
     'HADOOP_HOME': hadoop.conf.HADOOP_HOME.get(),
     'HADOOP_CONF_DIR': cluster.config_dir,
     'HIVE_CONF_DIR': beeswax.conf.BEESWAX_HIVE_CONF_DIR.get(),
+    'HIVE_HOME' : beeswax.conf.BEESWAX_HIVE_HOME_DIR.get(),
     'HADOOP_EXTRA_CLASSPATH_STRING': hadoop.conf.HADOOP_EXTRA_CLASSPATH_STRING.get()
   }
   if os.getenv("JAVA_HOME"):
@@ -81,17 +82,28 @@ def _start_server(cluster):
 
 
 def get_shared_beeswax_server():
+  # Copy hive-default.xml from BEESWAX_HIVE_CONF_DIR before it is set to
+  # /my/bogus/path
+  default_xml = file(beeswax.conf.BEESWAX_HIVE_CONF_DIR.get()+"/hive-default.xml").read()
+
   finish = (
     beeswax.conf.BEESWAX_SERVER_HOST.set_for_testing("localhost"),
     beeswax.conf.BEESWAX_SERVER_PORT.set_for_testing(BEESWAXD_TEST_PORT),
     beeswax.conf.BEESWAX_META_SERVER_HOST.set_for_testing("localhost"),
     beeswax.conf.BEESWAX_META_SERVER_PORT.set_for_testing(BEESWAXD_TEST_PORT + 1),
     # Use a bogus path to avoid loading the normal hive-site.xml
-    beeswax.conf.BEESWAX_HIVE_CONF_DIR.set_for_testing('/my/bogus/path'),
+    beeswax.conf.BEESWAX_HIVE_CONF_DIR.set_for_testing('/my/bogus/path')
   )
 
   cluster = mini_cluster.shared_cluster(conf=True)
 
+  # Copy hive-default.xml into the mini_cluster's conf dir, which happens to be
+  # in the cluster's tmpdir. This tmpdir is determined during the mini_cluster
+  # startup, during which BEESWAX_HIVE_CONF_DIR needs to be set to
+  # /my/bogus/path. Hence the step of writing to memory.
+  # hive-default.xml will get picked up by the beeswax_server during startup
+  file(cluster.tmpdir+"/conf/hive-default.xml", 'w').write(default_xml)
+
   global _SHARED_BEESWAX_SERVER_PROCESS
   if _SHARED_BEESWAX_SERVER_PROCESS is None:
     p = _start_server(cluster)
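The test harness now stashes hive-default.xml before hive_conf_dir is pointed at the bogus path, drops it into the mini cluster's conf dir, and forwards HIVE_HOME to the child beeswaxd. A hedged sketch of running the suite against a locally installed Hive; the test-runner invocation is an assumption and may vary by Hue version.

  export HIVE_HOME=/usr/lib/hive            # matches the new hive_home_dir default
  export HIVE_CONF_DIR=/etc/hive/conf       # must contain hive-default.xml for the copy step above
  ./build/env/bin/hue test specific beeswax # assumed invocation of the test runner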

+ 7 - 0
desktop/conf.dist/log4j.properties

@@ -52,3 +52,10 @@ log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}:
 #log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
 #log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
 #log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop
+# Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter

+ 12 - 1
maven/pom.xml

@@ -77,6 +77,11 @@
         <artifactId>hive-exec</artifactId>
         <version>${hive.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop.hive</groupId>
+        <artifactId>hive-common</artifactId>
+        <version>${hive.version}</version>
+      </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
@@ -126,6 +131,12 @@
         <artifactId>libthrift</artifactId>
         <version>0.5.0-cdh</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.thrift</groupId>
+        <artifactId>libfb303</artifactId>
+        <version>0.5.0-cdh</version>
+      </dependency>
+
 
     </dependencies>
   </dependencyManagement>
@@ -150,4 +161,4 @@
   </repositories>
 
 
-</project>
+</project>
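The parent pom now pins hive-common and libfb303 alongside the existing hive-exec and libthrift entries, so the Beeswax module can declare them without versions. A quick, hedged way to confirm they resolve (assumes the repositories declared in maven/pom.xml are reachable):

  # Inspect the resolved dependency tree of the Beeswax server module
  mvn -f apps/beeswax/java/pom.xml dependency:tree | grep -E 'hive-common|libthrift|libfb303'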

+ 23 - 1
tools/hudson/build-functions

@@ -21,7 +21,7 @@
 # If not specified, it uses the $CDH variable to select an archive location.
 #
 
-CDH_URL=${CDH_URL:-http://nightly.cloudera.com/cdh/3/hadoop-0.20.2-CDH3B4-SNAPSHOT.tar.gz}
+CDH_URL=${CDH_URL:-http://archive.cloudera.com/cdh/3/hadoop-0.20.2-CDH3B4.tar.gz}
 
 CDH_TGZ=$(basename $CDH_URL)
 CDH_VERSION=${CDH_TGZ/.tar.gz/}
@@ -41,3 +41,25 @@ build_hadoop() {
   tar -C $HADOOP_DIR -xzf $CDH_CACHE
   export HADOOP_HOME="$HADOOP_DIR/${CDH_VERSION}"
 }
+
+HIVE_URL=${HIVE_URL:-http://archive.cloudera.com/cdh/3/hive-0.7.0-CDH3B4.tar.gz}
+
+HIVE_TGZ=$(basename $HIVE_URL)
+HIVE_VERSION=${HIVE_TGZ/.tar.gz/}
+HIVE_CACHE="$HOME/.hue_cache/${HIVE_TGZ}"
+
+build_hive() {
+  if [ ! -f $HOME/.hue_cache ]; then
+    mkdir -p $HOME/.hue_cache
+    echo "Downloading $HIVE_URL..."
+    wget $HIVE_URL -O $HIVE_CACHE
+  fi
+
+  HIVE_DIR=$HUE_ROOT/ext/hive
+
+  mkdir -p $HIVE_DIR
+  echo "Unpacking $HIVE_CACHE to $HIVE_DIR"
+  tar -C $HIVE_DIR -xzf $HIVE_CACHE
+  export HIVE_HOME="$HIVE_DIR/${HIVE_VERSION}"
+  export HIVE_CONF_DIR=$HIVE_HOME/conf
+}
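build_hive mirrors build_hadoop: it caches the CDH Hive tarball under ~/.hue_cache, unpacks it into $HUE_ROOT/ext/hive, and exports HIVE_HOME and HIVE_CONF_DIR for the rest of the build (hudson.sh calls it right after build_hadoop, below). A sketch of exercising it outside Hudson; it assumes HUE_ROOT points at a checkout and that the tarball unpacks to a directory named after its basename, as the function expects.

  export HUE_ROOT=$PWD
  . tools/hudson/build-functions
  build_hive
  echo "HIVE_HOME=$HIVE_HOME"           # e.g. $HUE_ROOT/ext/hive/hive-0.7.0-CDH3B4
  echo "HIVE_CONF_DIR=$HIVE_CONF_DIR"   # $HIVE_HOME/conf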

+ 1 - 0
tools/hudson/hudson.sh

@@ -39,6 +39,7 @@ else
 fi
 
 build_hadoop
+build_hive
 
 make apps