build-functions

#!/bin/bash
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Common functions for hudson build scripts

if [ ! -e $HOME/.hue_cache ]; then
  mkdir -p $HOME/.hue_cache
fi;
# check_mtime MTIME_FILE URL
# Compares the URL's Last-Modified header against the value cached in MTIME_FILE.
# Returns 0 if unchanged; otherwise updates the cache file and returns 1.
check_mtime() {
  MTIME_FILE=${1}
  MTIME=$( curl -Is ${2} | awk 'BEGIN {FS=":"} { if ($1 == "Last-Modified") { print substr($2,2) } }' )
  if echo "${MTIME}" | diff ${MTIME_FILE} - > /dev/null; then
    return 0
  else
    echo ${MTIME} > ${MTIME_FILE}
    return 1
  fi;
}
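
# Usage sketch (illustrative only; the MY_* names are hypothetical, not part of
# this script). Each build_* function below pairs check_mtime with a cache-file
# check so an archive is only re-downloaded when it changed upstream:
#
#   MY_URL="http://example.com/my-archive.tar.gz"
#   MY_CACHE="$HOME/.hue_cache/my-archive.tar.gz"
#   MY_MTIME_FILE="$HOME/.hue_cache/.my_mtime"
#   if ! check_mtime ${MY_MTIME_FILE} ${MY_URL} || [ ! -f $MY_CACHE ]; then
#     wget ${MY_URL} -O ${MY_CACHE}
#   fi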

##########
#
# Use $CDH_URL to control where to download Hadoop.
# If not specified, the snapshot archive below is used by default
# (see the usage example after build_hadoop).
# Latest public release URL example: http://archive.cloudera.com/cdh5/cdh/5/hadoop-2.6.0-cdh5.13.0.tar.gz
#
CDH_URL=${CDH_URL:-http://repos.jenkins.cloudera.com/cdh5-static/cdh/5/hadoop-2.6.0-cdh5.17.0-SNAPSHOT.tar.gz}
CDH_TGZ=$(basename $CDH_URL)
CDH_VERSION=${CDH_TGZ/.tar.gz/}
CDH_SHORT_VERSION=${CDH_VERSION/hadoop-/}
CDH_CACHE="$HOME/.hue_cache/${CDH_TGZ}"
CDH_MTIME_FILE="$HOME/.hue_cache/.cdh_mtime"

build_hadoop() {
  if ! check_mtime ${CDH_MTIME_FILE} ${CDH_URL} || [ ! -f $CDH_CACHE ]; then
    echo "Downloading $CDH_URL..."
    wget $CDH_URL -O $CDH_CACHE
  fi

  HADOOP_DIR=$HUE_ROOT/ext/hadoop
  export YARN_HOME="$HADOOP_DIR/${CDH_VERSION}"
  export HADOOP_HDFS_HOME="$HADOOP_DIR/${CDH_VERSION}/share/hadoop/hdfs"
  export HADOOP_BIN="$HADOOP_DIR/${CDH_VERSION}/bin/hadoop"
  export HADOOP_MAPRED_HOME="$HADOOP_DIR/${CDH_VERSION}/share/hadoop/mapreduce2"
  export HADOOP_MAPRED_BIN="$HADOOP_DIR/${CDH_VERSION}/bin/mapred"

  mkdir -p $HADOOP_DIR
  rm -rf "$HADOOP_DIR/${CDH_VERSION}"
  echo "Unpacking $CDH_CACHE to $HADOOP_DIR"
  tar -C $HADOOP_DIR -xzf $CDH_CACHE
  ln -sf $HADOOP_DIR/${CDH_VERSION} $HADOOP_DIR/hadoop

  # For Hive
  ln -sf $HADOOP_DIR/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-core-*.jar $HADOOP_DIR/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-core.jar
  ln -sf $HADOOP_DIR/hadoop/share/hadoop/common/hadoop-common-*-SNAPSHOT.jar $HADOOP_DIR/hadoop/share/hadoop/common/hadoop-common.jar
  ln -sf $HADOOP_DIR/hadoop/share/hadoop/common/lib/hadoop-auth-*-SNAPSHOT.jar $HADOOP_DIR/hadoop/share/hadoop/common/lib/hadoop-auth.jar
  ln -sf $HADOOP_DIR/hadoop/share/hadoop/hdfs/hadoop-hdfs-${CDH_SHORT_VERSION}.jar $HADOOP_DIR/hadoop/share/hadoop/hdfs/hadoop-hdfs.jar

  # For MR2
  ln -sf "$HADOOP_DIR/${CDH_VERSION}/share/hadoop/mapreduce2" "$HADOOP_DIR/${CDH_VERSION}/share/hadoop/mapreduce"
}
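
# Example (a sketch, not part of the Jenkins job definitions): point $CDH_URL
# at the public release archive mentioned above instead of the internal
# snapshot, then build. Assumes $HUE_ROOT is already set by the calling script,
# and that the override is exported before this file is sourced (the default is
# applied at source time).
#
#   export CDH_URL=http://archive.cloudera.com/cdh5/cdh/5/hadoop-2.6.0-cdh5.13.0.tar.gz
#   . ./build-functions   # source this file (path depends on the caller)
#   build_hadoop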

##########

HIVE_URL=${HIVE_URL:-http://repos.jenkins.cloudera.com/cdh5-static/cdh/5/hive-1.1.0-cdh5.17.0-SNAPSHOT.tar.gz}
HIVE_TGZ=$(basename $HIVE_URL)
HIVE_VERSION=${HIVE_TGZ/.tar.gz/}
HIVE_CACHE="$HOME/.hue_cache/${HIVE_TGZ}"
HIVE_MTIME_FILE="$HOME/.hue_cache/.hive_mtime"

build_hive() {
  if ! check_mtime ${HIVE_MTIME_FILE} ${HIVE_URL} || [ ! -f $HIVE_CACHE ]; then
    echo "Downloading $HIVE_URL..."
    wget $HIVE_URL -O $HIVE_CACHE
  fi

  HIVE_DIR=$HUE_ROOT/ext/hive
  export HIVE_HOME="$HIVE_DIR/${HIVE_VERSION}"

  mkdir -p $HIVE_DIR
  rm -rf $HIVE_HOME
  echo "Unpacking $HIVE_CACHE to $HIVE_DIR"
  tar -C $HIVE_DIR -xzf $HIVE_CACHE
  ln -sf $HIVE_DIR/${HIVE_VERSION} $HIVE_DIR/hive
  export HIVE_CONF_DIR=$HIVE_HOME/conf

  # The hive launcher hardcodes HADOOP=$HADOOP_HOME/bin/hadoop; this (disabled)
  # rewrite would point it at a separate $HADOOP_HIVE_HOME instead.
  #sed -i'.bk' "s|HADOOP=\$HADOOP_HOME/bin/hadoop|HADOOP=\$HADOOP_HIVE_HOME/bin/hadoop|g" $HIVE_HOME/bin/hive
}

##########

OOZIE_URL=${OOZIE_URL:-http://repos.jenkins.cloudera.com/cdh5-static/cdh/5/oozie-4.1.0-cdh5.17.0-SNAPSHOT.tar.gz}
OOZIE_TGZ=$(basename $OOZIE_URL)
OOZIE_VERSION=${OOZIE_TGZ/.tar.gz/}
OOZIE_CACHE="$HOME/.hue_cache/${OOZIE_TGZ}"
OOZIE_MTIME_FILE="$HOME/.hue_cache/.oozie_mtime"

build_oozie() {
  if ! check_mtime ${OOZIE_MTIME_FILE} ${OOZIE_URL} || [ ! -f $OOZIE_CACHE ]; then
    echo "Downloading $OOZIE_URL..."
    wget $OOZIE_URL -O $OOZIE_CACHE
  fi

  OOZIE_DIR=$HUE_ROOT/ext/oozie
  export OOZIE_HOME="$OOZIE_DIR/${OOZIE_VERSION}"

  mkdir -p $OOZIE_DIR
  rm -rf $OOZIE_HOME
  echo "Unpacking $OOZIE_CACHE to $OOZIE_DIR"
  tar -C $OOZIE_DIR -xzf $OOZIE_CACHE
  export OOZIE_CONF_DIR=$OOZIE_HOME/conf

  rm -rf $OOZIE_DIR/oozie
  ln -sf $OOZIE_DIR/${OOZIE_VERSION} $OOZIE_DIR/oozie

  # Stage the matching hadooplib jars into libext so prepare-war can bundle them
  mkdir -p $OOZIE_HOME/libext
  tar -C $OOZIE_HOME/libext -zxvf $OOZIE_HOME/oozie-hadooplibs-*.tar.gz
  HADOOP_LIB=`echo "${CDH_VERSION}" | sed 's/hadoop/hadooplib/g'`
  cp $OOZIE_HOME/libext/oozie-*/hadooplibs/${HADOOP_LIB}*/*jar $OOZIE_HOME/libext/
  tar -C $OOZIE_HOME -zxvf $OOZIE_HOME/oozie-examples.tar.gz
  cp $OOZIE_HOME/oozie-sharelib-*-yarn.tar.gz $OOZIE_HOME/oozie-sharelib.tar.gz

  # Build the Oozie WAR and initialize the Oozie database
  $OOZIE_HOME/bin/oozie-setup.sh prepare-war
  $OOZIE_HOME/bin/ooziedb.sh create -sqlfile oozie.sql -run
}
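
# After build_oozie completes, the unpacked server could typically be started
# in the foreground with Oozie's standard launcher (an assumption about the
# surrounding workflow, not something this script does itself):
#
#   $OOZIE_HOME/bin/oozied.sh run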

##########

SQOOP_URL=${SQOOP_URL:-http://repos.jenkins.cloudera.com/cdh5-static/cdh/5/sqoop2-1.99.5-cdh5.17.0-SNAPSHOT.tar.gz}
SQOOP_TGZ=$(basename $SQOOP_URL)
SQOOP_VERSION=${SQOOP_TGZ/.tar.gz/}
SQOOP_CACHE="$HOME/.hue_cache/${SQOOP_TGZ}"
SQOOP_MTIME_FILE="$HOME/.hue_cache/.sqoop_mtime"

build_sqoop() {
  if ! check_mtime ${SQOOP_MTIME_FILE} ${SQOOP_URL} || [ ! -f $SQOOP_CACHE ]; then
    echo "Downloading $SQOOP_URL..."
    wget $SQOOP_URL -O $SQOOP_CACHE
  fi

  SQOOP_DIR=$HUE_ROOT/ext/sqoop
  export SQOOP_HOME="$SQOOP_DIR/${SQOOP_VERSION}"

  mkdir -p $SQOOP_DIR
  rm -rf $SQOOP_HOME
  echo "Unpacking $SQOOP_CACHE to $SQOOP_DIR"
  tar -C $SQOOP_DIR -xzf $SQOOP_CACHE
  export SQOOP_CONF_DIR=$SQOOP_HOME/server/conf

  rm -rf $SQOOP_DIR/sqoop
  ln -sf $SQOOP_DIR/${SQOOP_VERSION} $SQOOP_DIR/sqoop

  # Change ports and hostnames to be configurable or replaceable
  sed -i'.bk' 's/12000/${test.port.http}/g' $SQOOP_CONF_DIR/server.xml
  sed -i'.bk' 's/12001/${test.port.shutdown}/g' $SQOOP_CONF_DIR/server.xml
  sed -i'.bk' 's/localhost/${test.host.local}/g' $SQOOP_CONF_DIR/server.xml
  # Note: $HADOOP_DIR is set by build_hadoop, which must run first in the same shell.
  sed -i'.bk' "s|\(common.loader.*$\)|\1,$HADOOP_DIR/hadoop/share/hadoop/common/lib/*.jar,$HADOOP_DIR/hadoop/share/hadoop/mapreduce1/*.jar,$HADOOP_DIR/hadoop/share/hadoop/mapreduce1/lib/*.jar|g" $SQOOP_CONF_DIR/catalina.properties
  sed -i'.bk' "s|\${catalina\.base}/logs|\${test.log.dir}|g" $SQOOP_CONF_DIR/logging.properties
  sed -i'.bk' "s|\@LOGDIR\@|\${test.log.dir}|g" $SQOOP_CONF_DIR/sqoop.properties
  sed -i'.bk' "s|\@BASEDIR\@|\${test.base.dir}|g" $SQOOP_CONF_DIR/sqoop.properties
  sed -i'.bk' "s|/etc/hadoop/conf|\${test.hadoop.conf.dir}|g" $SQOOP_CONF_DIR/sqoop.properties
}
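
# Overall usage sketch (illustrative; the real Jenkins/Hudson jobs that source
# this file may differ). $HUE_ROOT must point at a Hue checkout before any
# build_* function runs, and build_hadoop should run before build_sqoop, which
# reuses the $HADOOP_DIR it sets.
#
#   export HUE_ROOT=$(pwd)
#   . ./build-functions   # source this file
#   build_hadoop
#   build_hive
#   build_oozie
#   build_sqoop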