1. Configure passwordless SSH key login
ssh-keygen -t rsa
cd ~/.ssh/
cat ./id_rsa.pub >> ./authorized_keys
ssh localhost   # test the passwordless login
exit   # log back out
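If ssh localhost still prompts for a password, overly permissive file modes are the usual cause; tightening them as below (a common fix, not always needed) generally resolves it:
chmod 700 ~/.ssh
chmod 600 ~/.ssh/authorized_keys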
2. Configure the Java environment
cd /usr/local
sudo tar -zxvf ~/jdk-8u162-linux-x64.tar.gz -C .
vim ~/.bashrc   # append the following four lines
export JAVA_HOME=/usr/local/jdk1.8.0_162
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH
source ~/.bashrc
java -version
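A quick sanity check that the variables took effect in the current shell (assuming the JDK was unpacked to /usr/local/jdk1.8.0_162 as above):
echo $JAVA_HOME
which java   # should point to /usr/local/jdk1.8.0_162/bin/java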
3. Install Hadoop
cd /usr/local
sudo tar -zvxf ~/hadoop-3.1.3.tar.gz -C .
sudo chown -R k ./hadoop-3.1.3   # replace k with your own username
cd /usr/local/hadoop-3.1.3
./bin/hadoop version   # check the version
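As an optional smoke test of the standalone installation, the bundled MapReduce examples jar can be run locally (paths below assume the default layout of the 3.1.3 tarball):
cd /usr/local/hadoop-3.1.3
mkdir ./input
cp ./etc/hadoop/*.xml ./input
./bin/hadoop jar ./share/hadoop/mapreduce/hadoop-mapreduce-examples-3.1.3.jar grep ./input ./output 'dfs[a-z.]+'
cat ./output/*
rm -r ./input ./output   # clean up before moving on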
4. Configure Hadoop in pseudo-distributed mode (a single node runs both the NameNode and the DataNode, so dfs.replication is set to 1)
cd /usr/local/hadoop-3.1.3/etc/hadoop/
vim core-site.xml
<configuration>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>file:/usr/local/hadoop-3.1.3/tmp</value>
        <description>A base for other temporary directories.</description>
    </property>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://localhost:9000</value>
    </property>
</configuration>
vim hdfs-site.xml
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/usr/local/hadoop-3.1.3/tmp/dfs/name</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/usr/local/hadoop-3.1.3/tmp/dfs/data</value>
    </property>
</configuration>
vim hadoop-env.sh   # set JAVA_HOME explicitly; the sbin start scripts do not always inherit it from ~/.bashrc
export JAVA_HOME=/usr/local/jdk1.8.0_162
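To double-check that the edit took effect (purely optional):
grep '^export JAVA_HOME' hadoop-env.sh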
5. Format the NameNode
cd /usr/local/hadoop-3.1.3
./bin/hdfs namenode -format
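If the NameNode has been formatted before, it is usually safest to clear the data directory first, otherwise the DataNode may later refuse to start because of a clusterID mismatch (the path matches hadoop.tmp.dir configured above):
rm -rf /usr/local/hadoop-3.1.3/tmp   # only when re-formatting; this deletes all HDFS data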
6. Start HDFS
cd /usr/local/hadoop-3.1.3
./sbin/start-dfs.sh   # start the NameNode, DataNode and SecondaryNameNode
./sbin/stop-dfs.sh   # run this later when you want to stop them
7. Verify
jps   # should list NameNode, DataNode and SecondaryNameNode
Open http://localhost:9870 in a browser to view the NameNode web UI (Hadoop 3.x uses port 9870 instead of the old 50070).
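With the daemons running, a short HDFS smoke test confirms everything works end to end (assuming the current user is k, as in step 3):
cd /usr/local/hadoop-3.1.3
./bin/hdfs dfs -mkdir -p /user/k
./bin/hdfs dfs -put ./etc/hadoop/core-site.xml /user/k
./bin/hdfs dfs -ls /user/k
./bin/hdfs dfs -cat /user/k/core-site.xml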