Set the hostname on each node, then add the name resolution entries:
hostnamectl set-hostname node1    //run on the 192.168.1.10 host
hostnamectl set-hostname node2    //run on the 192.168.1.11 host
hostnamectl set-hostname node3    //run on the 192.168.1.12 host
vim /etc/hosts
192.168.1.10 node1
192.168.1.11 node2
192.168.1.12 node3
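The same three entries belong in /etc/hosts on every node. A quick resolution check (these ping commands are my addition, not part of the original notes):
ping -c 1 node1
ping -c 1 node2
ping -c 1 node3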
Passwordless SSH login:
ssh-keygen -t rsa //just press Enter through every prompt
ssh-copy-id node1 //the first time, answer yes and enter hduser's password
ssh-copy-id node2
ssh-copy-id node3
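To confirm the passwordless login works (verification commands added here as an example):
ssh node2 hostname
ssh node3 hostname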
Create the user:
groupadd hadoop
useradd -g hadoop hduser
echo 123 | passwd --stdin hduser //set hduser's password to 123
vim /etc/sudoers
Line 92: hduser ALL=(ALL) ALL
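visudo is the safer way to edit /etc/sudoers; either way, the entry can be checked like this (verification commands are my addition):
su - hduser
sudo whoami //should print root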
On all hosts:
rpm -ivh jdk-8u171-linux-x64.rpm
vim /etc/profile
export JAVA_HOME=/usr/java/jdk1.8.0_171-amd64
export CLASSPATH=$JAVA_HOME/lib:$CLASSPATH
export PATH=$JAVA_HOME/bin:$PATH
Verify:
source /etc/profile
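source only reloads the profile in the current shell; to actually confirm the JDK and variables (these checks are my addition):
java -version
echo $JAVA_HOME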
node1:
tar zxvf hadoop-2.6.5.tar.gz
mv hadoop-2.6.5 hadoop
mv hadoop /home/hduser/
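If the tarball was unpacked as root, ownership of the tree likely needs to be handed to hduser before going on (an assumed step, not in the original notes):
chown -R hduser:hadoop /home/hduser/hadoop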
vim /etc/profile
#hadoop
export HADOOP_HOME=/home/hduser/hadoop
export PATH=$HADOOP_HOME/bin:$PATH
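To confirm the Hadoop variables took effect (example commands, not from the original):
source /etc/profile
hadoop version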
node1 (the configuration files below live in /home/hduser/hadoop/etc/hadoop/):
vim hadoop-env.sh
Add the following:
export JAVA_HOME=/usr/java/jdk1.8.0_171-amd64
vim yarn-env.sh
Add the following:
export JAVA_HOME=/usr/java/jdk1.8.0_171-amd64
vim slaves
Add the following:
node2
node3
vim core-site.xml
Add the following properties inside <configuration>:
<property>
    <name>fs.defaultFS</name>
    <value>hdfs://node1:9000</value>
</property>
<property>
    <name>hadoop.tmp.dir</name>
    <value>file:/home/hduser/hadoop/tmp</value>
</property>
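Hadoop will normally create the hadoop.tmp.dir path itself; pre-creating it is optional (my assumption, shown only for completeness):
mkdir -p /home/hduser/hadoop/tmp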
vim hdfs-site.xml
Add the following properties inside <configuration>:
<property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>node1:50090</value>
</property>
<property>
    <name>dfs.namenode.name.dir</name>
    <value>file:/home/hduser/hadoop/dfs/name</value>
</property>
<property>
    <name>dfs.datanode.data.dir</name>
    <value>file:/home/hduser/hadoop/dfs/data</value>
</property>
<property>
    <name>dfs.replication</name>
    <value>2</value>
</property>
<property>
    <name>dfs.webhdfs.enabled</name>
    <value>true</value>
</property>
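The name/data directories can be pre-created, and on a stock Hadoop 2.6.5 tree mapred-site.xml usually has to be copied from its template before the next step (both commands are my assumptions, run from the etc/hadoop directory):
mkdir -p /home/hduser/hadoop/dfs/name /home/hduser/hadoop/dfs/data
cp mapred-site.xml.template mapred-site.xml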
vim mapred-site.xml
Add the following properties inside <configuration>:
<property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
</property>
<property>
    <name>mapreduce.jobhistory.address</name>
    <value>node1:10020</value>
</property>
<property>
    <name>mapreduce.jobhistory.webapp.address</name>
    <value>node1:19888</value>
</property>
vim yarn-site.xml
Add the following properties inside <configuration>:
<property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
</property>
<property>
    <name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name>
    <value>org.apache.hadoop.mapred.ShuffleHandler</value>
</property>
<property>
    <name>yarn.resourcemanager.address</name>
    <value>node1:8032</value>
</property>
<property>
    <name>yarn.resourcemanager.scheduler.address</name>
    <value>node1:8030</value>
</property>
<property>
    <name>yarn.resourcemanager.resource-tracker.address</name>
    <value>node1:8035</value>
</property>
<property>
    <name>yarn.resourcemanager.admin.address</name>
    <value>node1:8033</value>
</property>
<property>
    <name>yarn.resourcemanager.webapp.address</name>
    <value>node1:8088</value>
</property>
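With node1 configured, node2 and node3 need the same tree as well; a minimal sketch, assuming the same user and path on every node (not part of the original notes):
scp -r /home/hduser/hadoop node2:/home/hduser/
scp -r /home/hduser/hadoop node3:/home/hduser/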