1:安装包
http://hadoop.apache.org/releases.html
http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html
hadoop-2.6.4 (binary)
jdk-8u102-linux-x64.tar.gz
2:节点信息
centos7 * 3
master 10.10.0.115
slave1 10.10.0.116
slave2 10.10.0.117
3:安装过程
3.1 节点初始化(略)
主机名 防火墙 selinux 等
3.2 节点互信
[[email protected] ~]#ssh-keygen
[[email protected] ~]#cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys
[[email protected] ~]#scp -r /root/.ssh slave1:/root/
[[email protected] ~]#scp -r /root/.ssh slave2:/root/
3.3 在所有节点安装jdk
[[email protected] ~]#tar -zxvf jdk-8u102-linux-x64.tar.gz
[[email protected] ~]#mkdir /usr/soft
[[email protected] ~]#mv jdk1.8.0_102 /usr/soft/
[[email protected] ~]#vim /etc/profile
...
export JAVA_HOME=/usr/soft/jdk1.8.0_102
export CLASSPATH=.:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$PATH:$JAVA_HOME/bin
[[email protected] ~]#source /etc/profile
[[email protected] ~]#java -version
java version "1.8.0_102"
Java(TM) SE Runtime Environment (build 1.8.0_102-b14)
Java HotSpot(TM) 64-Bit Server VM (build 25.102-b14, mixed mode)
其它节点同样操作
3.4 安装hadoop
先在master上安装配置,然后scp到其它节点即可
[[email protected] ~]#tar -zxvf hadoop-2.6.4.tar.gz
[[email protected] ~]#mv hadoop-2.6.4 /usr/soft/
[[email protected] ~]#cd /usr/soft/hadoop-2.6.4/
[[email protected] ~]#mkdir hdfs
[[email protected] ~]#mkdir hdfs/data
[[email protected] ~]#mkdir hdfs/name
[[email protected] ~]#mkdir tmp
[[email protected] ~]# cat /usr/soft/hadoop-2.6.4/etc/hadoop/core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://10.10.0.115:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>file:/usr/soft/hadoop-2.6.4/tmp</value>
</property>
<property>
<name>io.file.buffer.size</name>
<value>131072</value>
</property>
</configuration>
[[email protected] ~]# cat /usr/soft/hadoop-2.6.4/etc/hadoop/hdfs-site.xml
<configuration>
<property>
<name>dfs.namenode.name.dir</name>
<value>file:/usr/soft/hadoop-2.6.4/hdfs/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>file:/usr/soft/hadoop-2.6.4/hdfs/data</value>
</property>
<property>
<name>dfs.replication</name>
<value>2</value>
</property>
<property>
<name>dfs.namenode.secondary.http-address</name>
<value>10.10.0.115:9001</value>
</property>
<property>
<name>dfs.webhdfs.enabled</name>
<value>true</value>
</property>
</configuration>
[[email protected] ~]#cp /usr/soft/hadoop-2.6.4/etc/hadoop/mapred-site.xml.template /usr/soft/hadoop-2.6.4/etc/hadoop/mapred-site.xml
[[email protected] ~]# cat /usr/soft/hadoop-2.6.4/etc/hadoop/mapred-site.xml
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
<property>
<name>mapreduce.jobhistory.address</name>
<value>10.10.0.115:10020</value>
</property>
<property>
<name>mapreduce.jobhistory.webapp.address</name>
<value>10.10.0.115:19888</value>
</property>
</configuration>
[[email protected] ~]# cat /usr/soft/hadoop-2.6.4/etc/hadoop/yarn-site.xml
<configuration>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
<value>org.apache.hadoop.mapred.ShuffleHandler</value>
</property>
<property>
<name>yarn.resourcemanager.address</name>
<value>10.10.0.115:8032</value>
</property>
<property>
<name>yarn.resourcemanager.scheduler.address</name>
<value>10.10.0.115:8030</value>
</property>
<property>
<name>yarn.resourcemanager.resource-tracker.address</name>
<value>10.10.0.115:8031</value>
</property>
<property>
<name>yarn.resourcemanager.admin.address</name>
<value>10.10.0.115:8033</value>
</property>
<property>
<name>yarn.resourcemanager.webapp.address</name>
<value>10.10.0.115:8088</value>
</property>
<property>
<name>yarn.nodemanager.resource.memory-mb</name>
<value>768</value>
</property>
</configuration>
[[email protected] ~]# cat /usr/soft/hadoop-2.6.4/etc/hadoop/hadoop-env.sh
...
export JAVA_HOME=/usr/soft/jdk1.8.0_102
[[email protected] ~]# cat /usr/soft/hadoop-2.6.4/etc/hadoop/yarn-env.sh
...
export JAVA_HOME=/usr/soft/jdk1.8.0_102
[[email protected] ~]# cat /usr/soft/hadoop-2.6.4/etc/hadoop/slaves
10.10.0.116
10.10.0.117
[[email protected] ~]#scp -r /usr/soft/jdk1.8.0_102 slave1:/usr/soft/
[[email protected] ~]#scp -r /usr/soft/jdk1.8.0_102 slave2:/usr/soft/
[[email protected] ~]#scp -r /usr/soft/hadoop-2.6.4 slave1:/usr/soft/
[[email protected] ~]#scp -r /usr/soft/hadoop-2.6.4 slave2:/usr/soft/
[[email protected] ~]#/usr/soft/hadoop-2.6.4/bin/hdfs namenode -format
[[email protected] ~]# /usr/soft/hadoop-2.6.4/sbin/start-all.sh
浏览器打开http://10.10.0.115:8088/
浏览器打开http://10.10.0.115:50070/