build-functions

#!/bin/bash
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Common functions for Hudson build scripts.
#
# Use $CDH_URL to control where to download Hadoop.
# If not specified, a default CDH nightly snapshot URL is used.
#
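# Example (assumed usage; override the URL before sourcing this file):
#
#   CDH_URL=http://archive.example.com/hadoop-x.y.z.tar.gz   # hypothetical mirror
#   . ./build-functions
#   build_hadoop
#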
CDH_URL=${CDH_URL:-http://nightly.cloudera.com/cdh4/cdh/4/hadoop-0.23.1-cdh4b2-SNAPSHOT.tar.gz}
CDH_TGZ=$(basename $CDH_URL)
CDH_VERSION=${CDH_TGZ/.tar.gz/}
CDH_CACHE="$HOME/.hue_cache/${CDH_TGZ}"

build_hadoop() {
  if [ ! -f $CDH_CACHE ]; then
    # Download and save the tarball to the local cache
    mkdir -p $HOME/.hue_cache
    echo "Downloading $CDH_URL..."
    wget $CDH_URL -O $CDH_CACHE
  fi

  HADOOP_DIR=$HUE_ROOT/ext/hadoop
  export HADOOP_HDFS_HOME="$HADOOP_DIR/${CDH_VERSION}"
  export HADOOP_BIN=$HADOOP_HDFS_HOME/bin/hadoop

  # Remove any previous extraction before unpacking the cached tarball
  mkdir -p $HADOOP_DIR
  rm -rf $HADOOP_HDFS_HOME
  echo "Unpacking $CDH_CACHE to $HADOOP_DIR"
  tar -C $HADOOP_DIR -xzf $CDH_CACHE
}

##########

MR1_URL=${MR1_URL:-http://nightly.cloudera.com/cdh4/cdh/4/mr1-0.23.1-mr1-cdh4b2-SNAPSHOT.tar.gz}
MR1_TGZ=$(basename $MR1_URL)
MR1_VERSION=${MR1_TGZ/.tar.gz/}
# The unpacked tree is named hadoop-*, so map the mr1 prefix to hadoop
MR1_VERSION=${MR1_VERSION/mr1/hadoop}
MR1_CACHE="$HOME/.hue_cache/${MR1_TGZ}"

build_mr1() {
  if [ ! -f $MR1_CACHE ]; then
    mkdir -p $HOME/.hue_cache
    echo "Downloading $MR1_URL..."
    wget $MR1_URL -O $MR1_CACHE
  fi

  MR1_DIR=$HUE_ROOT/ext/mr1
  export HADOOP_MR1_HOME="$MR1_DIR/${MR1_VERSION}"
  export HADOOP_MR1_BIN="$HADOOP_MR1_HOME/bin/hadoop"

  mkdir -p $MR1_DIR
  rm -rf $HADOOP_MR1_HOME
  echo "Unpacking $MR1_CACHE to $MR1_DIR"
  tar -C $MR1_DIR -xzf $MR1_CACHE
}

##########

HIVE_URL=${HIVE_URL:-http://nightly.cloudera.com/cdh4/cdh/4/hive-0.8.1-cdh4b2-SNAPSHOT.tar.gz}
HIVE_TGZ=$(basename $HIVE_URL)
HIVE_VERSION=${HIVE_TGZ/.tar.gz/}
HIVE_CACHE="$HOME/.hue_cache/${HIVE_TGZ}"

build_hive() {
  if [ ! -f $HIVE_CACHE ]; then
    mkdir -p $HOME/.hue_cache
    echo "Downloading $HIVE_URL..."
    wget $HIVE_URL -O $HIVE_CACHE
  fi

  HIVE_DIR=$HUE_ROOT/ext/hive
  export HIVE_HOME="$HIVE_DIR/${HIVE_VERSION}"

  mkdir -p $HIVE_DIR
  rm -rf $HIVE_HOME
  echo "Unpacking $HIVE_CACHE to $HIVE_DIR"
  tar -C $HIVE_DIR -xzf $HIVE_CACHE

  export HIVE_CONF_DIR=$HIVE_HOME/conf
}
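
##########
# Usage sketch (assumed; the calling Hudson job is not part of this file):
#
#   . ./build-functions        # pick up the functions and default URLs
#   build_hadoop               # exports HADOOP_HDFS_HOME and HADOOP_BIN
#   build_mr1                  # exports HADOOP_MR1_HOME and HADOOP_MR1_BIN
#   build_hive                 # exports HIVE_HOME and HIVE_CONF_DIR
#
#   $HADOOP_BIN version        # e.g. sanity-check the unpacked Hadoop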