7. Clone three slave VMs and change their hostnames to slave1, slave2, and slave3.
Go into the /etc/sysconfig/network-scripts directory and change their IP addresses to 192.168.133.143, 192.168.133.144, and 192.168.133.145 respectively.
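On CentOS 7 this usually means editing the interface's ifcfg file and setting the hostname. A minimal sketch for slave1 follows; the interface name ens33 and the gateway are assumptions (check yours with `ip addr` and your VM NAT settings):
hostnamectl set-hostname slave1                    # use slave2/slave3 on the other clones
vi /etc/sysconfig/network-scripts/ifcfg-ens33      # interface name is an assumption
# Inside ifcfg-ens33, set a static address, e.g.:
#   BOOTPROTO=static
#   ONBOOT=yes
#   IPADDR=192.168.133.143
#   NETMASK=255.255.255.0
#   GATEWAY=192.168.133.2    # assumed; match your network's gateway
systemctl restart network                          # apply the new address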
Modify the hosts file
[root@master ~]# vi /etc/hosts
- Change it to:
192.168.133.142 master
192.168.133.143 slave1
192.168.133.144 slave2
192.168.133.145 slave3
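To keep name resolution identical across the cluster, the same hosts file can simply be copied to each slave (at this stage scp will still prompt for the root password, since SSH keys are set up next):
scp /etc/hosts slave1:/etc/hosts
scp /etc/hosts slave2:/etc/hosts
scp /etc/hosts slave3:/etc/hosts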
Configure passwordless SSH login
1. On each VM, generate an RSA key pair with the following command:
ssh-keygen -t rsa
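Press Enter at every prompt to accept the default key path and an empty passphrase. For a lab setup the same thing can be done non-interactively, for example:
ssh-keygen -t rsa -P "" -f /root/.ssh/id_rsa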
2. Distribute the public key
[root@master .ssh]# cat id_rsa.pub >> authorized_keys
[root@master .ssh]# chmod 644 authorized_keys
[root@master .ssh]# systemctl restart sshd.service
[root@master .ssh]# scp /root/.ssh/authorized_keys slave2:/root/.ssh
[root@master .ssh]# scp /root/.ssh/authorized_keys slave3:/root/.ssh
[root@master .ssh]# scp /root/.ssh/authorized_keys slave1:/root/.ssh
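Note that this only authorizes the master's key, which is enough for the master-to-slave logins used below. If the slaves also need passwordless access back to the master, each slave's own public key would have to be appended to master's authorized_keys as well; one way to do that from each slave (ssh-copy-id is a standard OpenSSH helper, not used elsewhere in this guide):
[root@slave1 ~]# ssh-copy-id -i /root/.ssh/id_rsa.pub master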
3. Verify the SSH login
[root@master .ssh]# ssh master
The authenticity of host 'master (192.168.133.142)' can't be established.
ECDSA key fingerprint is SHA256:2Bffpg/A1+5pIpz1wxrvrtDAOWhygRaJnuRbywSEmOQ.
ECDSA key fingerprint is MD5:48:5d:59:ae:19:95:3d:88:4d:3d:56:46:0d:ff:fe:4a.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'master,192.168.133.142' (ECDSA) to the list of known hosts.
Last login: Wed Oct 5 18:51:56 2022 from 192.168.133.156
[root@master ~]# ssh slave1
Last login: Wed Oct 5 18:53:23 2022 from 192.168.133.156
[root@slave1 ~]# exit
logout
Connection to slave1 closed.
[root@master ~]# ssh slave2
Last login: Wed Oct 5 18:53:25 2022 from 192.168.133.156
[root@slave2 ~]# exit
logout
Connection to slave2 closed.
[root@master ~]# ssh slave3
Last login: Wed Oct 5 18:52:07 2022 from 192.168.133.156
[root@slave3 ~]# exit
logout
Connection to slave3 closed.
Run Hadoop
1. Format HDFS
[root@master ~]# cd /usr/hadoop/hadoop-2.10.1/bin
[root@master bin]# hdfs namenode -format
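A successful format prints a line like "Storage directory ... has been successfully formatted." near the end of the log. If the cluster keeps the default hadoop.tmp.dir (an assumption; it may be overridden in core-site.xml), the freshly created metadata can also be spot-checked on disk:
ls /tmp/hadoop-root/dfs/name/current/    # expect VERSION and fsimage files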
2. Start the cluster
start-all.sh
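start-all.sh lives under the installation's sbin directory, so given the path used above the call would look like this (start-dfs.sh followed by start-yarn.sh is the equivalent, non-deprecated pair):
cd /usr/hadoop/hadoop-2.10.1/sbin
./start-all.sh
# or equivalently:
./start-dfs.sh
./start-yarn.sh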
3. Check the running daemons with jps
[root@master bin]# jps
19301 Jps
1626 NameNode
1978 ResourceManager
1821 SecondaryNameNode
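DataNode and NodeManager run on the slaves, which is why they do not appear in the master's jps output. They can be checked remotely, assuming the JDK's bin directory is on the slaves' non-interactive PATH:
ssh slave1 jps    # expect DataNode and NodeManager
ssh slave2 jps
ssh slave3 jps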
Test the Hadoop deployment
- View NameNode and DataNode status: 192.168.133.142:50070
- View SecondaryNameNode info: 192.168.133.142:50090
- View the YARN web UI: 192.168.133.142:8088
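If no browser is handy, the same health checks can be done from the shell; for example, the dfsadmin report should list three live DataNodes:
hdfs dfsadmin -report | grep -i "live datanodes"
curl -s http://192.168.133.142:50070 | head -n 5    # quick probe of the NameNode UI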
Test HDFS
Create the input and output paths and the file to upload:
[root@master hadoop-2.10.1]# hadoop fs -mkdir -p /data/wordcount
[root@master hadoop-2.10.1]# hadoop fs -mkdir -p /output/
[root@master hadoop-2.10.1]# vi /usr/inputword
[root@master bin]# cat /usr/inputword
hello world
hello hadoop
hello hdfs
hello test
Upload the locally prepared input file to HDFS:
[root@master hadoop-2.10.1]# hadoop fs -put /usr/inputword /data/wordcount
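The upload can be verified by listing the directory and printing the file back out of HDFS:
hadoop fs -ls /data/wordcount
hadoop fs -cat /data/wordcount/inputword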
WordCount test
[root@master hadoop-2.10.1]# bin/hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.10.1.jar wordcount /data/wordcount /output/wordcountresult
[root@master hadoop-2.10.1]# hadoop fs -text /output/wordcountresult/part-r-00000
hadoop 1
hdfs 1
hello 4
test 1
world 1
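MapReduce refuses to start a job whose output directory already exists, so before re-running the example the previous result has to be removed first:
hadoop fs -rm -r /output/wordcountresult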
General form of the wordcount command
bin/hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.10.1.jar wordcount wcinput wcoutput
- wordcount: the example program name
- wcinput: the input directory
- wcoutput: the output directory