[core] Remove some Java dependencies

Keep the plugin as is for now, since there is no real need to optimize its build.
Romain Rigaux 12 years ago
parent
commit
231c8d38c1

+ 11 - 14
README.rst

@@ -5,17 +5,17 @@ Welcome to the repository for Hue
 Hue is both a Web UI for Hadoop and a framework to create interactive Web
 applications. It features:
 
-      * FileBrowser for accessing HDFS      
+      * File Browser for accessing HDFS
       * Beeswax application for executing Hive queries
       * Impala App for executing Cloudera Impala queries
-      * Oozie App for submitting and scheduling workflows and bundles
-      * Pig App for submitting Pig scripts
+      * Oozie App for submitting and monitoring workflows, coordinators and bundles
+      * Pig Editor for submitting Pig scripts
       * HBase Browser for exploring and modifying HBase tables and data
-      * Table Browser for accessing Hive metadata
+      * Table Browser for accessing Hive metadata and HCatalog
       * Search app for querying Solr and Solr Cloud
-      * JobBrowser for accessing MapReduce jobs (MR1/MR2-YARN)
+      * Job Browser for accessing MapReduce jobs (MR1/MR2-YARN)
       * Job Designer for creating MapReduce/Streaming/Java jobs
-      * A Pig/HBase/Sqoop2 shell
+      * A Sqoop2 editor and dashboard
 
 On top of that, an SDK is available for creating new apps integrated with Hadoop.
 
@@ -31,16 +31,12 @@ To build and get the core server running::
     $ make apps
     $ build/env/bin/hue runserver
 
-If using the Beeswax application, start the daemon::
-
-    $ build/env/bin/hue beeswax_server
-
-Now Hue should be running on http://localhost:8000.
+Now Hue should be running on http://localhost:8000!
 
 The configuration in development mode is ``desktop/conf/pseudo-distributed.ini``.
 
 
-Note: to start all the servers in one command (but lose the automatic reloading after source modification)::
+Note: to start the production server (but lose the automatic reloading after source modification)::
 
    $ build/env/bin/supervisor
 
@@ -140,8 +136,8 @@ namespace.  See ``apps/about/src/about/urls.py`` for an example.
 Main Stack
 ==========
 
-   * Python 2.4 - 2.7
-   * Django 1.2 https://docs.djangoproject.com/en/1.2/
+   * Python 2.6 - 2.7
+   * Django 1.4.5 https://docs.djangoproject.com/en/1.4/
    * Mako
    * jQuery
    * Bootstrap
@@ -151,6 +147,7 @@ Community
 =========
    * User group: http://groups.google.com/a/cloudera.org/group/hue-user
    * Jira: https://issues.cloudera.org/browse/HUE
+   * Reviews: https://review.cloudera.org/dashboard/?view=to-group&group=hue (repo 'hue-rw')
 
 
 License

+ 2 - 46
desktop/libs/hadoop/Makefile

@@ -35,6 +35,7 @@ env-install: compile ext-env-install
 	@echo '--- Installing $(APP_NAME) into virtual-env'
 	@$(ENV_PYTHON) setup.py develop -N -q
 
+
 ###################################
 # Build Hadoop plugins
 ###################################
@@ -44,19 +45,7 @@ PLUGIN_JAVA_LIB := $(PLUGIN_DIR)/java-lib
 PLUGIN_JAVA_DIR := $(PLUGIN_DIR)/java
 BLD_DIR_PLUGINS := $(PLUGIN_DIR)/java/target
 
-SUDO_SHELL_DIR := $(realpath .)/sudo-shell
-SUDO_SHELL_SRC := $(SUDO_SHELL_DIR)/src
-SUDO_SHELL_BUILD := $(SUDO_SHELL_DIR)/target
-SUDO_SHELL_LIB := $(SUDO_SHELL_DIR)/java-lib
-SUDO_SHELL := $(SUDO_SHELL_LIB)/sudo-shell-$(DESKTOP_VERSION).jar
-
-CREDENTIALS_MERGER_DIR := $(realpath .)/credentials-merger
-CREDENTIALS_MERGER_SRC := $(CREDENTIALS_MERGER_DIR)/src
-CREDENTIALS_MERGER_BUILD := $(CREDENTIALS_MERGER_DIR)/target
-CREDENTIALS_MERGER_LIB := $(CREDENTIALS_MERGER_DIR)/java-lib
-CREDENTIALS_MERGER := $(CREDENTIALS_MERGER_LIB)/credentials-merger-$(DESKTOP_VERSION).jar
-
-compile: $(DESKTOP_PLUGIN_JAR) $(SUDO_SHELL) $(CREDENTIALS_MERGER)
+compile: $(DESKTOP_PLUGIN_JAR)
 
 # Build the plugin jar iff the source is present
 ifneq (,$(wildcard $(PLUGIN_JAVA_DIR)))
@@ -74,42 +63,9 @@ $(DESKTOP_PLUGIN_JAR):
 	$(error Cannot build hadoop plugin without source)
 endif
 
-# Build the sudo shell jar iff the source is present
-ifneq (,$(wildcard $(SUDO_SHELL_SRC)))
-$(SUDO_SHELL): $(shell find $(SUDO_SHELL_SRC) -type f)
-	mkdir -p $(SUDO_SHELL_LIB)
-	@echo "--- Building sudo fsshell tool"
-	cd $(SUDO_SHELL_DIR) && mvn clean install -DskipTests $(MAVEN_OPTIONS)
-	cp $(SUDO_SHELL_BUILD)/sudo-shell-$(MAVEN_VERSION).jar $(SUDO_SHELL)
-
-clean::
-	rm -rf $(SUDO_SHELL_LIB)
-	cd $(SUDO_SHELL_DIR) && mvn clean $(MAVEN_OPTIONS) ||:
-else
-$(SUDO_SHELL):
-	$(error cannot build sudo-shell jar without source)
-endif
-
-# Build the credentials merger jar iff the source is present
-ifneq (,$(wildcard $(CREDENTIALS_MERGER_SRC)))
-$(CREDENTIALS_MERGER): $(shell find $(CREDENTIALS_MERGER_SRC) -type f)
-	mkdir -p $(CREDENTIALS_MERGER_LIB)
-	@echo "--- Building credentials merger tool"
-	cd $(CREDENTIALS_MERGER_DIR) && mvn clean install -DskipTests $(MAVEN_OPTIONS)
-	cp $(CREDENTIALS_MERGER_BUILD)/credentials-merger-$(MAVEN_VERSION).jar $(CREDENTIALS_MERGER)
-
-clean::
-	rm -rf $(CREDENTIALS_MERGER_LIB)
-	cd $(CREDENTIALS_MERGER_DIR) && mvn clean $(MAVEN_OPTIONS) ||:
-else
-$(CREDENTAILS_MERGER):
-	$(error cannot build credentials merger jar without source)
-endif
-
 #
 # Tell the `bdist' target to exclude our java source.
 #
 BDIST_EXCLUDES += \
 	--exclude=java \
-	--exclude=sudo-shell/src \
 	--exclude=regenerate-thrift.sh
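
For context, the surviving plugin target uses the same source-presence guard as the jars deleted above: the rule is only defined when the Java tree exists, so a source-less tarball fails loudly instead of invoking Maven. A minimal sketch of the idiom, reusing the variable names from this Makefile (the recipe details are illustrative):

    ifneq (,$(wildcard $(PLUGIN_JAVA_DIR)))
    # Source tree present: rebuild the jar whenever any source file changes.
    $(DESKTOP_PLUGIN_JAR): $(shell find $(PLUGIN_JAVA_DIR) -type f)
    	cd $(PLUGIN_JAVA_DIR) && mvn clean install -DskipTests $(MAVEN_OPTIONS)
    else
    # No source (e.g. a release tarball): fail with a clear message.
    $(DESKTOP_PLUGIN_JAR):
    	$(error Cannot build hadoop plugin without source)
    endif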

+ 0 - 74
desktop/libs/hadoop/credentials-merger/pom.xml

@@ -1,74 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-   Licensed to Cloudera, Inc. under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<project>
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>com.cloudera.hue</groupId>
-    <artifactId>hue-parent</artifactId>
-    <relativePath>../../../../maven/pom.xml</relativePath>
-    <version>3.0.0-SNAPSHOT</version>
-  </parent>
-
-  <groupId>com.cloudera.hue</groupId>
-  <artifactId>credentials-merger</artifactId>
-  <packaging>jar</packaging>
-
-  <name>Hue Credentials File Merger Utility</name>
-
-  <scm>
-   <connection>scm:git:git://github.com/cloudera/hue.git</connection>
-   <developerConnection>scm:git:git@github.com:cloudera/hue.git</developerConnection>
-   <url>https://github.com/cloudera/hue</url>
-  </scm>  
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-client</artifactId>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>3.0</version>
-        <configuration>
-          <source>${javaVersion}</source>
-          <target>${targetJavaVersion}</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <configuration>
-          <archive>
-            <manifest>
-              <mainClass>com.cloudera.hue.CredentialsMerger</mainClass>
-            </manifest>
-          </archive>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-</project>
-      

+ 0 - 84
desktop/libs/hadoop/credentials-merger/src/main/java/com/cloudera/hue/CredentialsMerger.java

@@ -1,84 +0,0 @@
-// Licensed to Cloudera, Inc. under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  Cloudera, Inc. licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-package com.cloudera.hue;
-
-import java.io.BufferedReader;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.File;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.token.Token;
-
-/**
- * A tool to merge the credentials of multiple distinct files containing Hadoop
- * delegation tokens into a single file.
- */
-public class CredentialsMerger {
-
-  /**
-   * Merge several credentials files into one. Give the desired output file
-   * first, followed by all of the input files.
-   *
-   * <p>File formats are tried in this order: TokenStorageFile, urlEncodedString.
-   * </p>
-   *
-   * @param args &lt;out&gt; &lt;in1&gt; ...
-   * @throws IOException  in the event of an error reading or writing files.
-   */
-  public static void main(String[] args) throws IOException {
-    if (args.length < 2) {
-      printUsage();
-      System.exit(1);
-    }
-
-    Path outputFile = new Path("file://" + new File(args[0]).getAbsolutePath());
-    Configuration conf = new Configuration();
-    Credentials credentials = new Credentials();
-
-    for (int i = 1; i < args.length; i++) {
-      try {
-        Credentials singleFileCredentials = Credentials.readTokenStorageFile(
-            new Path("file://" + new File(args[i]).getAbsolutePath()), conf);
-        credentials.addAll(singleFileCredentials);
-      } catch (IOException e) {
-        BufferedReader reader = new BufferedReader(new FileReader(args[i]));
-        try {
-          // Retry to read the token with an encodedUrl format
-          Token<?> token = new Token();
-          String encodedtoken = reader.readLine();
-          token.decodeFromUrlString(encodedtoken);
-          credentials.addToken(new Text(args[i]), token);
-        } finally {
-          reader.close();
-        }
-      }
-    }
-
-    credentials.writeTokenStorageFile(outputFile, conf);
-  }
-
-  /**
-   * Show command usage.
-   */
-  private static void printUsage() {
-    System.err.println("Usage: " + CredentialsMerger.class.getCanonicalName()
-        + " <dst> <src> ...");
-  }
-}
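
For the record, a hypothetical invocation of the tool removed here, matching its documented `<dst> <src> ...` usage (the jar name and token file names are illustrative, not taken from this commit):

    $ java -cp credentials-merger-<version>.jar:`hadoop classpath` \
        com.cloudera.hue.CredentialsMerger merged.token hdfs.token hive.token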

+ 0 - 1
desktop/libs/hadoop/java/lib/README

@@ -1 +0,0 @@
-Thrift is from http://developers.facebook.com/thrift/.

+ 0 - 24
desktop/libs/hadoop/java/lib/Thrift.LICENSE

@@ -1,24 +0,0 @@
-Thrift Software License
-Copyright (c) 2006- Facebook, Inc.
-
-Permission is hereby granted, free of charge, to any person or organization
-obtaining a copy of the software and accompanying documentation covered by
-this license (the "Software") to use, reproduce, display, distribute,
-execute, and transmit the Software, and to prepare derivative works of the
-Software, and to permit third-parties to whom the Software is furnished to
-do so, all subject to the following:
-
-The copyright notices in the Software and this entire statement, including
-the above license grant, this restriction and the following disclaimer,
-must be included in all copies of the Software, in whole or in part, and
-all derivative works of the Software, unless such copies or derivative
-works are solely in the form of machine-executable object code generated by
-a source language processor.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
-SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
-FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
-ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.

BIN (binary file removed)
desktop/libs/hadoop/java/lib/libthrift.jar


+ 0 - 74
desktop/libs/hadoop/sudo-shell/pom.xml

@@ -1,74 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-   Licensed to Cloudera, Inc. under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<project>
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>com.cloudera.hue</groupId>
-    <artifactId>hue-parent</artifactId>
-    <relativePath>../../../../maven/pom.xml</relativePath>
-    <version>3.0.0-SNAPSHOT</version>
-  </parent>
-
-  <groupId>com.cloudera.hue</groupId>
-  <artifactId>sudo-shell</artifactId>
-  <packaging>jar</packaging>
-
-  <name>Hue Sudo Shell</name>
-
-  <scm>
-   <connection>scm:git:git://github.com/cloudera/hue.git</connection>
-   <developerConnection>scm:git:git@github.com:cloudera/hue.git</developerConnection>
-   <url>https://github.com/cloudera/hue</url>
-  </scm>  
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-client</artifactId>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>3.0</version>
-        <configuration>
-          <source>${javaVersion}</source>
-          <target>${targetJavaVersion}</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <configuration>
-          <archive>
-            <manifest>
-              <mainClass>com.cloudera.hue.SudoFsShell</mainClass>
-            </manifest>
-          </archive>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-</project>
-      

+ 0 - 58
desktop/libs/hadoop/sudo-shell/src/main/java/com/cloudera/hue/SudoFsShell.java

@@ -1,58 +0,0 @@
-// Licensed to Cloudera, Inc. under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  Cloudera, Inc. licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-package com.cloudera.hue;
-
-import org.apache.hadoop.fs.FsShell;
-import org.apache.hadoop.security.UserGroupInformation;
-import java.security.PrivilegedExceptionAction;
-
-/**
- * Tool that allows a user with proxyuser privileges to act on behalf
- * of another user on HDFS. Hue currently uses this tool in order to
- * upload files using the "-put" shell command on behalf of the logged-in
- * user.
- */
-public class SudoFsShell {
-  private static void usage() {
-    System.err.println("usage: SudoFsShell <username> <shell args ...>");
-  }
-
-  public static void main(String []args) throws Exception {
-    if (args.length < 1) {
-      usage();
-      System.exit(1);
-    }
-
-    String username = args[0];
-    final String shellArgs[] = new String[args.length - 1];
-    System.arraycopy(args, 1, shellArgs, 0, args.length - 1);
-
-    UserGroupInformation sudoUgi;
-    if (UserGroupInformation.isSecurityEnabled()) {
-      sudoUgi = UserGroupInformation.createProxyUser(
-        username, UserGroupInformation.getCurrentUser());
-    } else {
-      sudoUgi = UserGroupInformation.createRemoteUser(username);
-    }
-
-    sudoUgi.doAs(new PrivilegedExceptionAction<Void>() {
-        public Void run() throws Exception {
-          FsShell.main(shellArgs);
-          return null;
-        }
-      });
-  }
-}
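
Likewise, a hypothetical invocation of the deleted wrapper, per its `SudoFsShell <username> <shell args ...>` usage: the first argument names the user to impersonate and the rest is handed to FsShell (jar name and paths are illustrative):

    $ java -cp sudo-shell-<version>.jar:`hadoop classpath` \
        com.cloudera.hue.SudoFsShell bob -put localfile.txt /user/bob/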

+ 1 - 1
dist/README

@@ -53,4 +53,4 @@ Search or ask questions on the forum and http://groups.google.com/a/cloudera.org
 
 Some packages might be required during the 'make install': https://github.com/cloudera/hue#development-prerequisites
 
-Download a pre-built packaged version of Hue on http://gethue.com
+Download a pre-built packaged version (apt-get/yum install...) of Hue on http://gethue.com

+ 7 - 22
docs/manual.txt

@@ -45,7 +45,7 @@ overly restrictive firewalls. For small clusters of less than 10 nodes,
 you can use your existing master node as the Hue Server.
 
 You can download the Hue tarball here:
-http://github.com/cloudera/hue/downloads/
+gethue.tumblr.com/tagged/release
 
 Hue Dependencies
 ^^^^^^^^^^^^^^^^
@@ -88,8 +88,6 @@ Configure `$PREFIX` with the path where you want to install Hue by running:
 
   $ PREFIX=/usr/share make install
   $ cd /usr/share/hue
-  $ sudo chmod 4750 apps/shell/src/shell/build/setuid
-  $ sudo chown root:$USER apps/shell/src/shell/build/setuid
 
 You can install Hue anywhere on your system, and run Hue as a non-root user.
 The Shell application needs root privileges to launch various sub-processes as
@@ -141,9 +139,9 @@ proceeding.
   Yarn        No         JobDesigner, Beeswax               Transitive dependency via Hive or Oozie
   Oozie       No         JobDesigner, Oozie                 Oozie access through REST API
   Hive        No         Beeswax                            Beeswax uses the Hive client libraries
-  Flume       No         Shell                              Optionally provides access to the Flume shell
-  HBase       No         Shell                              Optionally provides access to the HBase shell
-  Pig         No         Shell                              Optionally provides access to the Pig shell
+  HBase       No         HBase Browser                      Requires Thrift 1 service
+  Pig         No         Pig Editor                         Requires Oozie
+  Sqoop2      No         Sqoop Editor                       Requires Sqoop2 server
 -------------------------------------------------------------------------------------
 
 
@@ -305,7 +303,7 @@ Hive Configuration
 ~~~~~~~~~~~~~~~~~~
 
 Hue's Beeswax application helps you use Hive to query your data.
-It depends on a Hive installation on your system. Please read
+It depends on a Hive Server 2 running in the cluster. Please read
 this section to ensure a proper integration.
 
 Your Hive data is stored in HDFS, normally under `/user/hive/warehouse`
@@ -315,15 +313,7 @@ the users whom you expect to be creating tables.  `/tmp` (on the local file
 system) must be world-writable (1777), as Hive makes extensive use of it.
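 
 A sketch of the corresponding permission setup, assuming the default paths above (a group-based scheme on the warehouse works equally well):
 
     $ hadoop fs -chmod 1777 /user/hive/warehouse
     $ sudo chmod 1777 /tmp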
 
 [NOTE]
-If you used the embedded Hive MetaStore functionality of Beeswax in Hue from
-versions prior to Hue 1.2, read this section. Hue 1.2 includes changes in the
-Hive MetaStore schema that are part of the Hive 0.7 release. If you want to use
-Beeswax in Hue 1.2, it is imperative that you upgrade the Hive MetaStore schema
-by running the appropriate schema upgrade script located in the
-`apps/beeswax/hive/scripts/metastore/upgrade` directory in the Hue installation.
-Scripts for Derby and MySQL databases are available. If you are using a
-different database for your MetaStore, you will need to provide your own
-upgrade script.
+
 
 No Existing Hive Installation
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -572,7 +562,7 @@ In the `[beeswax]` section of the configuration file, you can
 _optionally_ specify the following:
 
 beeswax_server_host::
-  The hostname or IP that the Beeswax Server should bind to. By
+  The hostname or IP that the Hive Server should bind to. By
   default it binds to `localhost`, and therefore only serves local
   IPC clients.
 
@@ -583,9 +573,6 @@ hive_conf_dir::
   The directory containing your `hive-site.xml` Hive
   configuration file.
 
-beeswax_server_heapsize::
-  The heap size (-Xmx) of the Beeswax Server.
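 
 A minimal `[beeswax]` sketch of these options for ``desktop/conf/pseudo-distributed.ini`` (the values are illustrative, not defaults set by this commit):
 
     [beeswax]
       # Bind beyond localhost so remote clients can connect.
       beeswax_server_host=0.0.0.0
       # Directory containing hive-site.xml.
       hive_conf_dir=/etc/hive/conf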
-
 
 JobDesigner and Oozie Configuration
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -657,7 +644,6 @@ A standard Hue installation starts and monitors the following processes:
 
 * `runcpserver` - a web server based on CherryPy that provides the core web
 functionality of Hue
-* `beeswax server` - a daemon that manages concurrent Hive queries
 
 If you have installed other applications into your Hue instance, you may see
 other daemons running under the supervisor as well.
@@ -666,7 +652,6 @@ You can see the supervised processes running in the output of `ps -f -u hue`:
 
   UID        PID  PPID  C STIME TTY          TIME CMD
   hue       8685  8679  0 Aug05 ?        00:01:39 /usr/share/hue/build/env/bin/python /usr/share/hue/build/env/bin/desktop runcpserver
-  hue       8695  8679  0 Aug05 ?        00:00:06 /usr/java/jdk1.6.0_14/bin/java -Xmx1000m -Dhadoop.log.dir=/usr/lib/hadoop-0.20/logs -Dhadoop.log.file=hadoop.log ...
 
 Note that the supervisor automatically restarts these processes if they fail for
 any reason. If the processes fail repeatedly within a short time, the supervisor

+ 3 - 19
maven/pom.xml

@@ -43,9 +43,9 @@
 
   <properties>
     <hadoop.version>2.1.0-mr1-cdh5.0.0-SNAPSHOT</hadoop.version>
-    <hive.version>0.11.0-cdh5.0.0-SNAPSHOT</hive.version>
     <slf4j.version>1.6.1</slf4j.version>
     <commons-logging.version>1.0.4</commons-logging.version>
+    <thrift.version>0.9.0</thrift.version>
     <javaVersion>1.6</javaVersion>
     <targetJavaVersion>1.6</targetJavaVersion>
   </properties>
@@ -133,21 +133,6 @@
         <artifactId>hadoop-client</artifactId>
         <version>${hadoop.version}</version>
       </dependency>
-      <dependency>
-        <groupId>org.apache.hive</groupId>
-        <artifactId>hive-metastore</artifactId>
-        <version>${hive.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hive</groupId>
-        <artifactId>hive-exec</artifactId>
-        <version>${hive.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hive</groupId>
-        <artifactId>hive-common</artifactId>
-        <version>${hive.version}</version>
-      </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
@@ -195,15 +180,14 @@
       <dependency>
         <groupId>org.apache.thrift</groupId>
         <artifactId>libthrift</artifactId>
-        <version>0.9.0</version>
+        <version>${thrift.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.thrift</groupId>
         <artifactId>libfb303</artifactId>
-        <version>0.9.0</version>
+        <version>${thrift.version}</version>
       </dependency>
 
-
     </dependencies>
   </dependencyManagement>