1. Set JAVA_HOME and HADOOP_HOME in /etc/profile:
export JAVA_HOME=    # fill in the path to your JDK install
export JRE_HOME=$JAVA_HOME/jre
export CLASSPATH=.:$JAVA_HOME/lib:$JRE_HOME/lib
export PATH=$JAVA_HOME/bin:$PATH
export HADOOP_HOME=    # fill in the path to your Hadoop install
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
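After editing /etc/profile, reload it and sanity-check that both tools resolve (version output will vary with your installs):
source /etc/profile
java -version
hadoop version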
2. Point JAVA_HOME in etc/hadoop/hadoop-env.sh at the same JDK.
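For example (the JDK path below is only an illustration; use the same path as in /etc/profile):
# hadoop-env.sh: set this explicitly; Hadoop does not always inherit it from the shell
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64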
3. Configuration files (all under $HADOOP_HOME/etc/hadoop/):
<?xml version="1.0"?>
<!-- core-site.xml -->
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://localhost/</value>
  </property>
</configuration>
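With no port in the URI, fs.defaultFS falls back to the NameNode's default port 8020. You can confirm the value Hadoop actually loaded with:
hdfs getconf -confKey fs.defaultFS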
<?xml version="1.0"?>
<!-- hdfs-site.xml -->
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
</configuration>
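A replication factor of 1 matters here: a pseudo-distributed setup runs a single DataNode, so the default factor of 3 would leave every block permanently under-replicated. Check it the same way:
hdfs getconf -confKey dfs.replication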
<?xml version="1.0"?>
<!-- mapred-site.xml -->
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>
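Note that Hadoop 2.x tarballs ship only a template for this file; create mapred-site.xml from it before editing:
cp $HADOOP_HOME/etc/hadoop/mapred-site.xml.template $HADOOP_HOME/etc/hadoop/mapred-site.xml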
<?xml version="1.0"?>
<!-- yarn-site.xml -->
<configuration>
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>localhost</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
</configuration>
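Once the daemons are running (step 5), a quick way to confirm the NodeManager registered with the ResourceManager:
yarn node -list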
4. Set up passwordless ssh to localhost:
sudo apt-get install ssh
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 0600 ~/.ssh/authorized_keys
ssh localhost
5. Format HDFS (first run only; reformatting wipes the NameNode metadata), then wrap the start/stop commands in two small scripts:
hdfs namenode -format
#start_hadoop.sh
#!/bin/bash
start-dfs.sh
start-yarn.sh
mr-jobhistory-daemon.sh start historyserver
#stop_hadoop.sh
#!/bin/bash
mr-jobhistory-daemon.sh stop historyserver
stop-yarn.sh
stop-dfs.sh
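Make both scripts executable (chmod +x start_hadoop.sh stop_hadoop.sh). After start_hadoop.sh, jps (bundled with the JDK) should show one JVM per daemon:
jps
# expect, with varying PIDs: NameNode, DataNode, SecondaryNameNode,
# ResourceManager, NodeManager, JobHistoryServer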
6. Create an HDFS home directory for the current user:
hadoop fs -mkdir -p /user/$USER
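As an end-to-end smoke test, run one of the bundled MapReduce examples; the jar path below follows the 2.x layout, so adjust the version glob to your release:
hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar pi 2 5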
7. Web UIs (Hadoop 2.x default ports):
NameNode:        http://localhost:50070/
ResourceManager: http://localhost:8088/
History server:  http://localhost:19888/
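If a page does not load, the matching daemon is likely down; a headless check from the shell (a 200 or a redirect means the UI is up):
curl -sI http://localhost:50070/ | head -n 1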