1.安装centos
2.关闭防火墙
sudo firewall-cmd --state::查看防火墙状态
sudo systemctl stop firewalld.service::关闭防火墙
sudo systemctl disable firewalld.service::禁止防火墙
3.修改ip和主机名
3.1修改静态ip
cd /etc/sysconfig/network-scripts::到此目录下
cp ifcfg-enp0s3 ifcfg-enp0s8::把ifcfg-enp0s3复制一份改成ifcfg-enp0s8
sudo vim ifcfg-enp0s8::修改此文件
sudo systemctl restart network.service::重启网卡,或者重启电脑
3.2修改主机名和host文件
sudo hostnamectl set-hostname hadoop81::修改主机名
修改host文件
sudo vi /etc/hosts
4.安装jdk
用rz命令上传jdk
cd /usr ::到/usr目录下解压
sudo tar -zxvf ~/jdk-8u162-linux-x64.tar.gz -C . ::解压
配置环境变量
sudo vim /etc/profile.d/java.sh
(:r !echo /usr/jdk...可以快速找到目录)
source /etc/profile ::让文件生效
5.安装hadoop
rz上传hadoop文件
sudo mkdir /app ::在根目录下建立/app
cd /app ::进入/app目录(后面的chown和解压都在此目录下执行)
sudo chown -R keys:keys . ::把此目录给keys用户
sudo tar -zxvf ~/hadoop-2.7.6.tar.gz -C . ::到/app下解压
cd /app/hadoop-2.7.6/share ::到此目录下删除doc(为了使反应快)
sudo rm -R doc ::删除doc目录
6.配置hadoop::cd /app/hadoop-2.7.6/etc/hadoop::到此目录下
6.1hadoop-env.sh
6.2core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://hadoop81:8020</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/app/hadoop/tmp</value>
</property>
</configuration>
6.3hdfs-site.xml
<configuration>
<property>
<name>dfs.namenode.secondary.http-address</name>
<value>hadoop83:50090</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>/app/hadoop/dfs/name,/app/hadoop/dfs2/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>/app/hadoop/dfs/data,/app/hadoop/dfs2/data</value>
</property>
</configuration>
6.4 mapred-site.xml
cp mapred-site.xml.template mapred-site.xml
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
6.5yarn-site.xml
<configuration>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>hadoop81</value>
</property>
<property>
<name>yarn.nodemanager.local-dirs</name>
<value>/app/hadoop/nm-local-dir</value>
</property>
</configuration>
6.6slaves
7.hadoop环境变量
sudo vim /etc/profile.d/hadoop.sh ::编辑此文件,写入以下两行
export HADOOP_HOME=/app/hadoop-2.7.6
export PATH=$PATH:$HADOOP_HOME/bin
source /etc/profile ::让文件生效
成功后输入hdfs version 测试下
8.克隆环境
修改主机名和ip地址见第三步,修改成82,83.
9.建立ssh信任
ssh-keygen -t rsa -P "" ::在hadoop81上生成密钥(免密登录需要空密码,不能设置口令)
ssh-copy-id hadoop81 ::把公钥分发到本机及hadoop82、hadoop83(逐台执行ssh-copy-id)
10.格式化namenode
hdfs namenode -format ::格式化
11.启动
/app/hadoop-2.7.6/sbin/start-dfs.sh
/app/hadoop-2.7.6/sbin/start-yarn.sh
使用jps命令查看每个系统的进程,如第一张表分布即为成功