
Friday, September 2, 2016

Hadoop 2.7.0 Single Node Cluster Setup on Ubuntu 15.04




$ sudo apt-get update

$ sudo apt-get install default-jdk

$ java -version

$ sudo apt-get install ssh

$ sudo apt-get install rsync

$ ssh-keygen -t dsa -P '' -f ~/.ssh/id_dsa

$ cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys
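
Passwordless SSH to localhost is what the Hadoop start scripts rely on, so it is worth verifying now (you should get a shell without a password prompt):

$ ssh localhost
$ exit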

$ wget -c http://apache.mirrors.lucidnetworks.net/hadoop/common/hadoop-2.7.0/hadoop-2.7.0.tar.gz

$ sudo tar -zxvf hadoop-2.7.0.tar.gz

$ sudo mv hadoop-2.7.0 /usr/local/hadoop

$ update-alternatives --config java
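
update-alternatives shows where the installed JDK actually lives; on a stock 64-bit Ubuntu 15.04 install of default-jdk the output looks something like this:

          There is only one alternative in link group java (providing /usr/bin/java):
          /usr/lib/jvm/java-7-openjdk-amd64/jre/bin/java

Everything before /jre/bin/java is the JAVA_HOME value used in the next step.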

$ sudo nano ~/.bashrc

          #Hadoop Variables
          export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64
          export HADOOP_HOME=/usr/local/hadoop
          export PATH=$PATH:$HADOOP_HOME/bin
          export PATH=$PATH:$HADOOP_HOME/sbin
          export HADOOP_MAPRED_HOME=$HADOOP_HOME
          export HADOOP_COMMON_HOME=$HADOOP_HOME
          export HADOOP_HDFS_HOME=$HADOOP_HOME
          export YARN_HOME=$HADOOP_HOME
          export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
          export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib"

$ source ~/.bashrc
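
To confirm the new variables took effect in the current shell:

$ echo $HADOOP_HOME
$ hadoop version

hadoop version should print the 2.7.0 release banner; if the command is not found, recheck the two PATH lines above.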

$ cd /usr/local/hadoop/etc/hadoop

$ sudo nano hadoop-env.sh

          #The java implementation to use.
          export JAVA_HOME="/usr/lib/jvm/java-7-openjdk-amd64"

$ sudo nano core-site.xml

          <configuration>
                  <property>
                      <name>fs.defaultFS</name>
                      <value>hdfs://localhost:9000</value>
                  </property>
          </configuration>

$ sudo nano yarn-site.xml

          <configuration>
                  <property>
                      <name>yarn.nodemanager.aux-services</name>
                      <value>mapreduce_shuffle</value>
                  </property>
                  <property>
                      <name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name>
                      <value>org.apache.hadoop.mapred.ShuffleHandler</value>
                  </property>
          </configuration>

$ sudo cp mapred-site.xml.template mapred-site.xml

$ sudo nano mapred-site.xml

          <configuration>
                  <property>
                      <name>mapreduce.framework.name</name>
                      <value>yarn</value>
                  </property>
          </configuration>

$ sudo nano hdfs-site.xml

          <configuration>
                  <property>
                      <name>dfs.replication</name>
                      <value>1</value>
                  </property>
                  <property>
                      <name>dfs.namenode.name.dir</name>
                      <value>file:/usr/local/hadoop/hadoop_data/hdfs/namenode</value>
                  </property>
                  <property>
                      <name>dfs.datanode.data.dir</name>
<value>file:/usr/local/hadoop/hadoop_data/hdfs/datanode</value>
                  </property>
          </configuration>
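
With core-site.xml, yarn-site.xml, mapred-site.xml, and hdfs-site.xml in place, a quick way to check that the XML parses and the configuration is being picked up is to ask Hadoop to echo a key back (hdfs getconf ships with 2.7.0):

$ hdfs getconf -confKey fs.defaultFS

This should print hdfs://localhost:9000.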

$ cd

$ sudo mkdir -p /usr/local/hadoop/hadoop_data/hdfs/namenode

$ sudo mkdir -p /usr/local/hadoop/hadoop_data/hdfs/datanode

$ sudo chown chaal:chaal -R /usr/local/hadoop

(sudo is needed for the mkdir calls because /usr/local/hadoop is still owned by root at this point; the chown then hands the whole tree to the user that will run Hadoop. Replace chaal:chaal with your own user and group.)

$ hdfs namenode -format
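
The format command prints a long run of INFO lines; the one to look for near the end confirms that the name directory from hdfs-site.xml was initialized, something like:

          ... Storage directory /usr/local/hadoop/hadoop_data/hdfs/namenode has been successfully formatted.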

$ start-all.sh
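
start-all.sh still works in Hadoop 2.x but is deprecated; the equivalent is to bring HDFS and YARN up separately:

$ start-dfs.sh
$ start-yarn.sh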

$ jps
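
If everything started cleanly, jps should list the five Hadoop daemons plus itself (the PIDs on the left will differ on your machine):

          3842 NameNode
          3967 DataNode
          4136 SecondaryNameNode
          4287 ResourceManager
          4406 NodeManager
          4730 Jps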



ResourceManager web UI: http://192.168.56.10:8088/
NameNode web UI: http://192.168.56.10:50070/

(Substitute your machine's own IP address, or localhost, for 192.168.56.10.)
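
As a final smoke test, create a home directory in HDFS and run one of the example jobs bundled with the distribution (the jar path assumes the /usr/local/hadoop layout used in this post):

$ hdfs dfs -mkdir -p /user/$(whoami)
$ hdfs dfs -ls /
$ hadoop jar /usr/local/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.0.jar pi 2 5

The pi job should end by printing an estimated value of Pi, which confirms HDFS, YARN, and MapReduce are all wired together.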