THIS IS A TEST INSTANCE. ALL YOUR CHANGES WILL BE LOST!!!!
...
### Code Block
#!/usr/bin/env bash
# Set up an Ubuntu 16.04 machine to build Impala and run its test environment.
# Sections: general build deps -> Impala build -> test services (Postgres,
# SSH/HBase, NTP/Kudu, HDFS sockets, LZO libraries).
# Run as a user with sudo rights; several steps also require manual edits
# (noted inline). Fail fast on any error.
set -euo pipefail

# General build requirements
sudo apt-get install build-essential git maven

# General Java requirements
sudo apt-get install openjdk-8-jdk

#==============#
# Impala Build #
#==============#

# Packages required to build Python virtualenv
sudo apt-get install libpython-dev

# Ubuntu 16.04 doesn't have /usr/bin/python symlink by default, which is
# required by some shebangs. Package "python" provides it.
sudo apt-get install python

# Packages required to build Impala
sudo apt-get install libssl-dev libsasl2-dev libkrb5-dev

# Minimum environment variables required to build.
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64/
# Additional recommended variable to speed up builds.
export USE_GOLD_LINKER=true

./buildall.sh -noclean -notests
# TODO: wait for Ubuntu 16.04 fixes: std::isnan() and std::isinf().

#==================#
# Test environment #
#==================#

# Postgres
sudo apt-get install postgresql
sudo service postgresql start
# MANUAL STEP: edit pg_hba.conf to change "peer" to "trust" before running
# the psql command below.
sudo -u postgres psql postgres -c "CREATE ROLE hiveuser LOGIN PASSWORD 'password'; ALTER ROLE hiveuser WITH CREATEDB;"

# Setup passwordless ssh for hbase
sudo apt-get install openssh-server
sudo service ssh start
# Do not type in any passkey. Just press enter.
# NOTE(review): OpenSSH >= 7.0 (shipped with Ubuntu 16.04) disables DSA host
# and user keys by default — if passwordless ssh fails, regenerate with
# `ssh-keygen -t ed25519` and append the matching .pub file instead. Kept as
# DSA here to match the original instructions.
ssh-keygen -t dsa
cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys

# Setup ntpd for Kudu
sudo apt-get install ntp
sudo systemctl restart ntp.service

# Add a path for HDFS domain sockets.
# -p makes this idempotent on re-runs; "$USER" replaces the non-shell
# "<user>" placeholder (which would have been parsed as redirections).
sudo mkdir -p /var/lib/hadoop-hdfs/
sudo chown "$USER" /var/lib/hadoop-hdfs/

# Get lzo libraries for data loading
git clone https://github.com/cloudera/impala-lzo.git ../Impala-lzo
git clone https://github.com/cloudera/hadoop-lzo.git ../hadoop-lzo
sudo apt-get install liblzo2-dev
cd ../hadoop-lzo
ant package

# Setup loopback
# TODO