#!/bin/bash
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Common functions for hudson build scripts.
#
# Use $CDH_URL to control where to download Hadoop from.
# If it is not specified, the $CDH variable selects an archive location.
# (See the example overrides below the cache-path variables.)
if [ -z "$CDH_URL" ] ; then
  CDH=${CDH:-"2"}
  case "$CDH" in
    2)
      CDH_URL="http://archive.cloudera.com/cdh/2/hadoop-0.20.1+169.68.tar.gz"
      ;;
    3)
      CDH_URL="http://archive.cloudera.com/cdh/3/hadoop-0.20.2+228.tar.gz"
      ;;
  esac
fi
# Derive the tarball name, the unpacked directory name, and the local cache path.
CDH_TGZ=$(basename "$CDH_URL")
CDH_VERSION=${CDH_TGZ/.tar.gz/}
CDH_CACHE="$HOME/.hue_cache/${CDH_TGZ}"
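
# Example overrides (illustrative sketch only; the sourcing script name
# "hudson-common.sh" is hypothetical, not part of this file):
#
#   CDH=3 . ./hudson-common.sh          # pick the CDH3 tarball listed above
#   CDH_URL=http://archive.cloudera.com/cdh/3/hadoop-0.20.2+228.tar.gz \
#     . ./hudson-common.sh              # point directly at a specific archive
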
build_hadoop() {
  # Download the CDH tarball into the local cache unless it is already there.
  if [ ! -f "$CDH_CACHE" ]; then
    mkdir -p "$HOME/.hue_cache"
    echo "Downloading $CDH_URL..."
    wget "$CDH_URL" -O "$CDH_CACHE"
  fi

  # Unpack the cached tarball under the Hue tree and export HADOOP_HOME.
  HADOOP_DIR=$HUE_ROOT/ext/hadoop
  mkdir -p "$HADOOP_DIR"
  echo "Unpacking $CDH_CACHE to $HADOOP_DIR"
  tar -C "$HADOOP_DIR" -xzf "$CDH_CACHE"
  export HADOOP_HOME="$HADOOP_DIR/${CDH_VERSION}"
}
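
# Minimal usage sketch (an assumption, not part of the original file): callers
# are expected to set HUE_ROOT and then invoke build_hadoop. The guard below
# only fires when this file is executed directly rather than sourced, so
# sourcing behavior is unchanged.
if [ "${BASH_SOURCE[0]}" = "$0" ] ; then
  : "${HUE_ROOT:?HUE_ROOT must point at the Hue checkout}"
  build_hadoop
  echo "Hadoop unpacked; HADOOP_HOME=$HADOOP_HOME"
fi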