Hadoop installation and configuration

1. Extract the installation package

[root@localhost software]# tar -zxvf hadoop-2.6.0-cdh5.14.2.tar.gz

2. Rename the extracted directory:

[root@localhost software]# mv hadoop-2.6.0-cdh5.14.2 hadoop

3. Enter the etc/hadoop directory under the hadoop folder

[root@localhost software]# cd hadoop
[root@localhost hadoop]# cd etc/hadoop

4. Modify the hadoop-env.sh file

[root@hadoop5 hadoop]# vi hadoop-env.sh

#export JAVA_HOME=${JAVA_HOME}
export JAVA_HOME=/root/software/jdk1.8.0_221
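
A quick way to confirm that this path points at a working JDK (same path as configured above):

[root@hadoop5 hadoop]# /root/software/jdk1.8.0_221/bin/java -version   # should report version 1.8.0_221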

5. Modify the core-site.xml file

[root@hadoop5 hadoop]# vi core-site.xml

<configuration>
 <property>
   <name>fs.defaultFS</name>
   <value>hdfs://hadoop5:9000</value>   <!-- change hadoop5 to this host's hostname -->
 </property>
 <property>
   <name>hadoop.tmp.dir</name>
   <value>/root/software/hadoop/tmp</value>
 </property>
 <property>
   <name>hadoop.proxyuser.root.hosts</name>
   <value>*</value>
 </property>
 <property>
   <name>hadoop.proxyuser.root.groups</name>
   <value>*</value>
  </property>
</configuration> 
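
Note that fs.defaultFS uses the hostname hadoop5, so that name must resolve to this machine (via DNS or /etc/hosts). A minimal sketch, where the IP address is a placeholder you must replace with this host's address, plus creating the directory named in hadoop.tmp.dir:

[root@hadoop5 hadoop]# echo "192.168.x.x hadoop5" >> /etc/hosts   # replace 192.168.x.x with this host's IP
[root@hadoop5 hadoop]# mkdir -p /root/software/hadoop/tmp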

6. Modify the hdfs-site.xml file:

[root@localhost hadoop]# vi hdfs-site.xml

<configuration>
 <property>
  <name>dfs.replication</name>
  <value>1</value>
 </property>
</configuration>
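
Setting dfs.replication to 1 is the usual choice for a single-node setup. Once the hdfs command is on the PATH (step 9), you can verify the value Hadoop actually picks up:

[root@localhost hadoop]# hdfs getconf -confKey dfs.replication   # should print 1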

7. Modify the mapred-site.xml.template file:

[root@localhost hadoop]# vi mapred-site.xml.template

<configuration>
<property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
</property>
</configuration>

Rename after configuration:

mv mapred-site.xml.template mapred-site.xml
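
The original steps do not touch yarn-site.xml, but since mapreduce.framework.name is set to yarn, a yarn-site.xml that enables the MapReduce shuffle service is commonly configured alongside it. A minimal sketch of that common setting (not part of the original steps):

[root@localhost hadoop]# vi yarn-site.xml

<configuration>
 <property>
   <name>yarn.nodemanager.aux-services</name>
   <value>mapreduce_shuffle</value>
 </property>
</configuration>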

8. Modify the /etc/profile file:

[root@localhost hadoop]# vi /etc/profile

export JAVA_HOME=/root/software/jdk1.8.0_221
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin/:/root/bin
export HADOOP_HOME=/root/software/hadoop
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib"
export PATH=$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH

9. Make the environment configuration take effect:

[root@localhost hadoop]# source /etc/profile
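
A quick sanity check that the new environment variables are in effect:

[root@localhost hadoop]# echo $HADOOP_HOME   # should print /root/software/hadoop
[root@localhost hadoop]# hadoop version      # should report Hadoop 2.6.0-cdh5.14.2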

10. Format HDFS

[root@localhost hadoop]#hdfs namenode -format
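
The format output should contain a line like "has been successfully formatted". With the default name directory under hadoop.tmp.dir, you can also check that the NameNode metadata was created:

[root@localhost hadoop]# ls /root/software/hadoop/tmp/dfs/name/current/   # should list VERSION, fsimage files, etc.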

11. Go back to the hadoop directory under software

[root@localhost hadoop]# cd ../..

12. Input: ll

[root@localhost hadoop]# ll

The output will look like:

drwxr-xr-x.  2 1106 4001   137 Mar 28  2018 bin
drwxr-xr-x.  2 1106 4001   166 Mar 28  2018 bin-mapreduce1
drwxr-xr-x.  3 1106 4001  4096 Mar 28  2018 cloudera
drwxr-xr-x.  6 1106 4001   109 Mar 28  2018 etc
drwxr-xr-x.  5 1106 4001    43 Mar 28  2018 examples
drwxr-xr-x.  3 1106 4001    28 Mar 28  2018 examples-mapreduce1
drwxr-xr-x.  2 1106 4001   106 Mar 28  2018 include
drwxr-xr-x.  3 1106 4001    20 Mar 28  2018 lib
drwxr-xr-x.  3 1106 4001   261 Mar 28  2018 libexec
-rw-r--r--.  1 1106 4001 85063 Mar 28  2018 LICENSE.txt
drwxr-xr-x.  3 root root  4096 Mar 14 15:46 logs
-rw-r--r--.  1 1106 4001 14978 Mar 28  2018 NOTICE.txt
-rw-r--r--.  1 1106 4001  1366 Mar 28  2018 README.txt
drwxr-xr-x.  3 1106 4001  4096 Mar 28  2018 sbin
drwxr-xr-x.  4 1106 4001    31 Mar 28  2018 share
drwxr-xr-x. 18 1106 4001  4096 Mar 28  2018 src
drwxr-xr-x.  4 root root    37 Mar 14 15:46 tmp

13. Start Hadoop

[root@localhost hadoop]# start-all.sh
[root@localhost hadoop]# jps

The output will look like:

29942 ResourceManager
30232 Jps
30029 NodeManager
29551 NameNode
29663 DataNode
29807 SecondaryNameNode
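
As an additional check, the NameNode and ResourceManager web UIs (default ports 50070 and 8088 in Hadoop 2.x) should respond:

[root@localhost hadoop]# curl -s http://hadoop5:50070 | head -5
[root@localhost hadoop]# curl -s http://hadoop5:8088/cluster | head -5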

14. The configuration is now complete

15. If a configuration error leaves some daemons missing from the jps output, check the corresponding log file to locate the error:

[root@localhost hadoop]# tail -50f logs/hadoop-root-datanode-hadoop2.log   # change the file name to the log of the missing daemon
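
To scan all of the daemon logs for problems at once, a simple grep also works:

[root@localhost hadoop]# grep -iE "error|fatal" logs/*.log | tail -20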

Origin blog.csdn.net/qq_42005540/article/details/114796413