#!/usr/bin/env bash
# Hadoop 2.7.1 cluster setup — run on the master node.
#
# Lab preparation (all three nodes):
#   1. Hostnames:
#        192.168.216.128  master
#        192.168.216.129  slave1
#        192.168.216.130  slave2
#   2. Put the same three entries in /etc/hosts on every node:
#        192.168.216.128  master
#        192.168.216.129  slave1
#        192.168.216.130  slave2
#   3. Disable the firewall on all nodes.

set -euo pipefail

# --- download and unpack Hadoop on the master ---
cd /usr/local/src
# NOTE: the original mirror (mirror.bit.edu.cn) no longer carries old
# releases; archived versions live at archive.apache.org.
wget https://archive.apache.org/dist/hadoop/common/hadoop-2.7.1/hadoop-2.7.1.tar.gz
tar zxvf hadoop-2.7.1.tar.gz
mv hadoop-2.7.1 /usr/local/hadoop
cd /usr/local/hadoop/
# -p creates parents and is idempotent on re-runs.
mkdir -p tmp dfs/data dfs/name

# --- distribute /usr/local/hadoop to both slaves ---
yum install -y rsync openssh-clients

# BUG FIX: the original command used a trailing slash on the source
# ("rsync -av /usr/local/hadoop/ slave1:/usr/local/"), which copies the
# directory's *contents* into /usr/local on the slave — which is exactly
# why the files were later found scattered under /usr/local/share.
# Without the trailing slash, rsync copies the directory itself, producing
# /usr/local/hadoop on each slave as intended.
# (First connection prompts for host-key confirmation: answer "yes".)
rsync -av /usr/local/hadoop slave1:/usr/local/
rsync -av /usr/local/hadoop slave2:/usr/local/

# Afterwards, verify on each slave that /usr/local/hadoop exists.