环境准备
| IP | HOSTNAME | SYSTEM |
| --- | --- | --- |
| 192.168.131.129 | hadoop-master | CentOS 7.6 |
| 192.168.131.135 | hadoop-slave1 | CentOS 7.6 |
| 192.168.131.137 | hadoop-slave2 | CentOS 7.6 |
[root@localhost ~]# cat /etc/redhat-release CentOS Linux release 7.6.1810 (Core) [root@localhost ~]# sestatus SELinux status: disabled [root@localhost ~]# systemctl status firewalld.service ● firewalld.service - firewalld - dynamic firewall daemon Loaded: loaded (/usr/lib/systemd/system/firewalld.service; disabled; vendor preset: enabled) Active: inactive (dead) Docs: man:firewalld(1) （注意：以下三条 hostnamectl 命令分别在三台对应的机器上执行，而不是在同一台机器上连续执行） [root@localhost ~]# hostnamectl set-hostname --static hadoop-master [root@localhost ~]# hostnamectl set-hostname --static hadoop-slave1 [root@localhost ~]# hostnamectl set-hostname --static hadoop-slave2
配置免密
[root@hadoop-master ~]# cat >> /etc/hosts <<EOF 192.168.131.129 hadoop-master 192.168.131.135 hadoop-slave1 192.168.131.137 hadoop-slave2 EOF [root@hadoop-master ~]# ssh-keygen [root@hadoop-master ~]# ssh-copy-id hadoop-slave1 [root@hadoop-master ~]# ssh-copy-id hadoop-slave2 [root@hadoop-master ~]# scp /etc/hosts hadoop-slave1:/etc/hosts hosts 100% 248 151.9KB/s 00:00 [root@hadoop-master ~]# scp /etc/hosts hadoop-slave2:/etc/hosts hosts 100% 248 220.9KB/s 00:00 （建议同时执行 ssh-copy-id hadoop-master：start-dfs.sh 启动 NameNode/SecondaryNameNode 时也会通过 ssh 连接本机，缺少对自身的免密会导致启动时提示输入密码）
配置java环境
[root@hadoop-master ~]# tar xf jdk-8u211-linux-x64.tar.gz -C /usr/local/ [root@hadoop-master ~]# ln -s /usr/local/jdk1.8.0_211/ /usr/local/java [root@hadoop-master ~]# ll /usr/local/ total 0 drwxr-xr-x. 2 root root 6 Apr 11 2018 bin drwxr-xr-x. 2 root root 6 Apr 11 2018 etc drwxr-xr-x. 2 root root 6 Apr 11 2018 games drwxr-xr-x. 2 root root 6 Apr 11 2018 include lrwxrwxrwx 1 root root 24 Sep 7 14:49 java -> /usr/local/jdk1.8.0_211/ drwxr-xr-x 7 10 143 245 Apr 2 2019 jdk1.8.0_211 drwxr-xr-x. 2 root root 6 Apr 11 2018 lib drwxr-xr-x. 2 root root 6 Apr 11 2018 lib64 drwxr-xr-x. 2 root root 6 Apr 11 2018 libexec drwxr-xr-x. 2 root root 6 Apr 11 2018 sbin drwxr-xr-x. 5 root root 49 Mar 30 2019 share drwxr-xr-x. 2 root root 6 Apr 11 2018 src [root@hadoop-master ~]# vim /etc/profile export JAVA_HOME=/usr/local/java/ export PATH=$JAVA_HOME/bin:$PATH export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar [root@hadoop-master ~]# source /etc/profile [root@hadoop-master ~]# java -version java version "1.8.0_211" Java(TM) SE Runtime Environment (build 1.8.0_211-b12) Java HotSpot(TM) 64-Bit Server VM (build 25.211-b12, mixed mode)
部署hadoop
[root@hadoop-master ~]# wget https://mirrors.tuna.tsinghua.edu.cn/apache/hadoop/common/hadoop-2.7.7/hadoop-2.7.7.tar.gz [root@hadoop-master ~]# tar xf hadoop-2.7.7.tar.gz -C /opt/ [root@hadoop-master ~]# vim /etc/profile export HADOOP_HOME=/opt/hadoop-2.7.7 export PATH=$HADOOP_HOME/bin:$PATH [root@hadoop-master ~]# source /etc/profile [root@hadoop-master ~]# cd /opt/hadoop-2.7.7/ [root@hadoop-master hadoop-2.7.7]# ls bin etc include lib libexec LICENSE.txt NOTICE.txt README.txt sbin share [root@hadoop-master hadoop-2.7.7]# cd etc/hadoop/ [root@hadoop-master hadoop]# ll total 152 -rw-r--r-- 1 1000 ftp 4436 Jul 19 2018 capacity-scheduler.xml -rw-r--r-- 1 1000 ftp 1335 Jul 19 2018 configuration.xsl -rw-r--r-- 1 1000 ftp 318 Jul 19 2018 container-executor.cfg -rw-r--r-- 1 1000 ftp 774 Jul 19 2018 core-site.xml -rw-r--r-- 1 1000 ftp 3670 Jul 19 2018 hadoop-env.cmd -rw-r--r-- 1 1000 ftp 4224 Jul 19 2018 hadoop-env.sh -rw-r--r-- 1 1000 ftp 2598 Jul 19 2018 hadoop-metrics2.properties -rw-r--r-- 1 1000 ftp 2490 Jul 19 2018 hadoop-metrics.properties -rw-r--r-- 1 1000 ftp 9683 Jul 19 2018 hadoop-policy.xml -rw-r--r-- 1 1000 ftp 775 Jul 19 2018 hdfs-site.xml -rw-r--r-- 1 1000 ftp 1449 Jul 19 2018 httpfs-env.sh -rw-r--r-- 1 1000 ftp 1657 Jul 19 2018 httpfs-log4j.properties -rw-r--r-- 1 1000 ftp 21 Jul 19 2018 httpfs-signature.secret -rw-r--r-- 1 1000 ftp 620 Jul 19 2018 httpfs-site.xml -rw-r--r-- 1 1000 ftp 3518 Jul 19 2018 kms-acls.xml -rw-r--r-- 1 1000 ftp 1527 Jul 19 2018 kms-env.sh -rw-r--r-- 1 1000 ftp 1631 Jul 19 2018 kms-log4j.properties -rw-r--r-- 1 1000 ftp 5540 Jul 19 2018 kms-site.xml -rw-r--r-- 1 1000 ftp 11801 Jul 19 2018 log4j.properties -rw-r--r-- 1 1000 ftp 951 Jul 19 2018 mapred-env.cmd -rw-r--r-- 1 1000 ftp 1383 Jul 19 2018 mapred-env.sh -rw-r--r-- 1 1000 ftp 4113 Jul 19 2018 mapred-queues.xml.template -rw-r--r-- 1 1000 ftp 758 Jul 19 2018 mapred-site.xml.template -rw-r--r-- 1 1000 ftp 10 Jul 19 2018 slaves -rw-r--r-- 1 1000 ftp 2316 Jul 19 2018 
ssl-client.xml.example -rw-r--r-- 1 1000 ftp 2697 Jul 19 2018 ssl-server.xml.example -rw-r--r-- 1 1000 ftp 2250 Jul 19 2018 yarn-env.cmd -rw-r--r-- 1 1000 ftp 4567 Jul 19 2018 yarn-env.sh -rw-r--r-- 1 1000 ftp 690 Jul 19 2018 yarn-site.xml
（注：原文此处将上一节"部署hadoop"的全部内容重复了一遍，重复段落已删除。）
修改hadoop-env.sh和yarn-env.sh中JAVA_HOME参数
[root@hadoop-master hadoop]# sed -i 's#export JAVA_HOME=.*#export JAVA_HOME='/usr/local/java'#g' hadoop-env.sh [root@hadoop-master hadoop]# sed -i 's#.*export JAVA_HOME=.*#export JAVA_HOME='/usr/local/java'#g' yarn-env.sh [root@hadoop-master hadoop]# grep "export JAVA_HOME" hadoop-env.sh export JAVA_HOME=/usr/local/java [root@hadoop-master hadoop]# grep "export JAVA_HOME" yarn-env.sh export JAVA_HOME=/usr/local/java
编辑core-site.xml
[root@hadoop-master hadoop]# vim core-site.xml <configuration> <property> <name>fs.default.name</name> <value>hdfs://192.168.131.129:9000</value> </property> <property> <name>hadoop.tmp.dir</name> <value>/opt/hadoop/tmp</value> </property> </configuration> （说明：fs.default.name 在 Hadoop 2.x 中已标记为废弃，推荐改用 fs.defaultFS，旧名称目前仍兼容可用）
编辑hdfs-site.xml
[root@hadoop-master hadoop]# vim hdfs-site.xml <configuration> <property> <name>dfs.replication</name> <value>2</value> </property> <property> <name>dfs.namenode.name.dir</name> <value>file:/opt/hadoop/dfs/name</value> </property> <property> <name>dfs.datanode.data.dir</name> <value>file:/opt/hadoop/dfs/data</value> </property> </configuration>
编辑mapred-site.xml
[root@hadoop-master hadoop]# cp mapred-site.xml.template mapred-site.xml [root@hadoop-master hadoop]# vim mapred-site.xml <configuration> <property> <name>mapreduce.framework.name</name> <value>yarn</value> </property> <property> <name>mapreduce.jobhistory.address</name> <value>192.168.131.129:10020</value> </property> <property> <name>mapreduce.jobhistory.webapp.address</name> <value>192.168.131.129:19888</value> </property> </configuration>
编辑yarn-site.xml
[root@hadoop-master hadoop]# vim yarn-site.xml <configuration> <!-- Site specific YARN configuration properties --> <property> <name>yarn.nodemanager.aux-services</name> <value>mapreduce_shuffle</value> </property> <property> <name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name> <value>org.apache.hadoop.mapred.ShuffleHandler</value> </property> <property> <name>yarn.resourcemanager.address</name> <value>192.168.131.129:8032</value> </property> <property> <name>yarn.resourcemanager.scheduler.address</name> <value>192.168.131.129:8030</value> </property> <property> <name>yarn.resourcemanager.resource-tracker.address</name> <value>192.168.131.129:8031</value> </property> <property> <name>yarn.resourcemanager.admin.address</name> <value>192.168.131.129:8033</value> </property> <property> <name>yarn.resourcemanager.webapp.address</name> <value>192.168.131.129:8088</value> </property> </configuration>
编辑slaves
[root@hadoop-master hadoop]# vim slaves hadoop-slave1 hadoop-slave2
把配置分发到各个节点
[root@hadoop-master opt]# scp -r hadoop-2.7.7/ hadoop-slave1:/opt/ [root@hadoop-master opt]# scp -r hadoop-2.7.7/ hadoop-slave2:/opt/
在主节点启动hadoop
初始化namenode [root@hadoop-master ~]# hdfs namenode -format 启动HDFS [root@hadoop-master ~]# cd /opt/hadoop-2.7.7/sbin/ [root@hadoop-master sbin]# ./start-dfs.sh 启动YARN [root@hadoop-master sbin]# ./start-yarn.sh （说明：mapred-site.xml 中配置了 JobHistory 的 10020/19888 端口，如需使用还需执行 ./mr-jobhistory-daemon.sh start historyserver 启动历史服务器）
测试
浏览器访问 YARN WEB地址:http://192.168.131.129:8088/ HDFS WEB地址:http://192.168.131.129:50070/