Hadoop configuration with yum

Hadoop installation

# Modify the host name
hostnamectl set-hostname hadoop
# Modify the hosts file
vim /etc/hosts
# Append at the end
10.0.0.11 hadoop
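
To confirm the changes before moving on, a quick check (assumes the 10.0.0.11 entry added above):

# The new hostname should be printed, and the hosts entry should resolve
hostname
ping -c 1 hadoop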

Install prerequisite dependencies

yum -y install openssh*
yum -y install man*
yum -y install compat-libstdc++-33*
yum -y install libaio-0.*
yum -y install libaio-devel*
yum -y install sysstat-9.*
yum -y install glibc-2.*
yum -y install glibc-devel-2.* glibc-headers-2.*
yum -y install ksh-2*
yum -y install libgcc-4.*
yum -y install libstdc++-4.*
yum -y install libstdc++-4.*.i686*
yum -y install libstdc++-devel-4.*
yum -y install gcc-4.*x86_64*
yum -y install gcc-c++-4.*x86_64*
yum -y install elfutils-libelf-0*x86_64* elfutils-libelf-devel-0*x86_64*
yum -y install elfutils-libelf-0*i686* elfutils-libelf-devel-0*i686*
yum -y install libtool-ltdl*i686*
yum -y install ncurses*i686*
yum -y install ncurses*
yum -y install readline*
yum -y install unixODBC*
yum -y install zlib
yum -y install zlib*
yum -y install openssl*
yum -y install patch
yum -y install git
yum -y install lzo-devel zlib-devel gcc autoconf automake libtool
yum -y install lzop
yum -y install lrzsz
yum -y install nc
yum -y install glibc
yum -y install gzip
yum -y install gcc
yum -y install gcc-c++
yum -y install make
yum -y install protobuf
yum -y install protoc
yum -y install cmake
yum -y install openssl-devel
yum -y install ncurses-devel
yum -y install unzip
yum -y install telnet
yum -y install telnet-server
yum -y install wget
yum -y install svn
yum -y install ntpdate
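
The list above is long and a few entries overlap; to confirm that the key build tools actually landed, a minimal sanity check (the package names below are a representative sample, not the full list):

# Report any of the sampled packages that failed to install
for pkg in gcc gcc-c++ make cmake openssl-devel zlib-devel; do
  rpm -q "$pkg" >/dev/null || echo "missing: $pkg"
done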

Download all of the required packages from:

https://pan.baidu.com/s/1ouCGECr-mRg5qjXf8WzCqw extraction code: 04lv

Install Java

# Remove the Java that ships with the system
# First check whether Java is already installed
rpm -qa | grep java
# If there is, remove each result with: rpm -e <package from the output above> --nodeps (--nodeps skips the dependency check)
rpm -e tzdata-java-2014g-1.el6.noarch --nodeps
# Upload the jdk-8u45-linux-x64.tar.gz package to the /usr directory
cd /usr
rz
# Write the following script:
vim deljava.sh
#!/bin/bash
for i in `rpm -qa | grep java`
do
rpm -e $i --nodeps
done
# Execute the script
sh deljava.sh
# Enter /usr
cd /usr
ls
# Unpack the archive
tar xzvf jdk-8u45-linux-x64.tar.gz
# Rename the directory
mv jdk1.8.0_45/ java
# Configure the environment variables
vim /etc/profile
# Append at the end
export JAVA_HOME=/usr/java
export JRE_HOME=/usr/java/jre
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
export PATH=$PATH:$JAVA_HOME/bin
# Reload the environment variables
source /etc/profile
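
To confirm the JDK is wired up, a quick check:

# Should report the 1.8.0_45 build and the /usr/java path set above
java -version
echo $JAVA_HOME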

Install the Hadoop 2.7.3 pseudo-distributed system

# Configure SSH trust
# Upload the sshUserSetup.sh script to the /root directory
rz
# Execute the following command
# Usage: sh <script name> -user <user to trust> -hosts "hadoop hadoop1 hadoop2 hadoop3"; there is no need to answer yes at the check
sh sshUserSetup.sh -user root -hosts "hadoop" -advanced -noPromptPassphrase
# Upload the hadoop-2.7.3.tar.gz package to /usr/local
cd /usr/local
rz
# Unpack the package
tar xzvf hadoop-2.7.3.tar.gz
# Rename the extracted directory
mv hadoop-2.7.3 hadoop
# Configure the environment variables
vim /etc/profile
# Add at the end
export HADOOP_HOME=/usr/local/hadoop
#export HADOOP_OPTS="-Djava.library.path=$HADOOP_PREFIX/lib:$HADOOP_PREFIX/lib/native"
export LD_LIBRARY_PATH=$HADOOP_HOME/lib/native
export HADOOP_COMMON_LIB_NATIVE_DIR=/usr/local/hadoop/lib/native
export HADOOP_OPTS="-Djava.library.path=/usr/local/hadoop/lib"
#export HADOOP_ROOT_LOGGER=DEBUG,console
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
# Reload the environment variables
source /etc/profile
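
Before configuring anything, it is worth confirming both the SSH trust and the Hadoop install (a quick sanity check, not part of the original walkthrough):

# Should print the remote date with no password prompt
ssh hadoop date
# Should report Hadoop 2.7.3
hadoop version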

Hadoop cluster configuration

# Enter the Hadoop configuration file directory
cd /usr/local/hadoop/etc/hadoop/
# Hadoop environment configuration file
vim hadoop-env.sh
# Replace line 25 with
export JAVA_HOME=/usr/java
################
vim core-site.xml
# The file already contains the pair of tags
# <configuration>
# </configuration>
# Add the following between them
<!-- Specify the port on which Hadoop serves external requests and which host is the NameNode -->
<property>
  <name>fs.defaultFS</name>
  <value>hdfs://hadoop:9000</value>
</property>

<!-- Where Hadoop stores the temporary files it generates -->
<property>
  <name>hadoop.tmp.dir</name>
  <value>/var/hadoop/tmp</value>
</property>
################
vim hdfs-site.xml
<property>
  <name>dfs.replication</name>
  <value>1</value>
</property>
<property>
  <name>dfs.permissions.enabled</name>
  <value>false</value>
</property>
###################
# Copy the template file
cp mapred-site.xml.template mapred-site.xml
vim mapred-site.xml
<property>
  <name>mapreduce.framework.name</name>
  <value>yarn</value>
</property>
#####################
vim yarn-site.xml
<property>
  <name>yarn.resourcemanager.hostname</name>
  <value>hadoop</value>
</property>
<property>
  <name>yarn.nodemanager.aux-services</name>
  <value>mapreduce_shuffle</value>
</property>
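
One way to confirm Hadoop parses these files as intended is to ask it to echo a configured key back (hdfs getconf ships with 2.x):

# Should print hdfs://hadoop:9000 if core-site.xml was edited correctly
hdfs getconf -confKey fs.defaultFS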

Hadoop operation

# Format the Hadoop HDFS file system
hdfs namenode -format
# Start the Hadoop cluster
start-all.sh
# Type yes 3 times when prompted
# Stop the Hadoop cluster
stop-all.sh
# Check for the following processes to confirm success
[root@hadoop hadoop]# jps
5973 Jps
5447 SecondaryNameNode
5687 NodeManager
5592 ResourceManager
5177 NameNode
5295 DataNode
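
With all six daemons up, an optional smoke test is to run one of the bundled example jobs; the jar path below assumes the /usr/local/hadoop install location used throughout:

# Estimate pi with 2 mappers and 10 samples each; success exercises both HDFS and YARN
hadoop jar /usr/local/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.3.jar pi 2 10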

Suppress the native-library warning

vim /usr/local/hadoop/etc/hadoop/log4j.properties
# Add at the end
log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
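
If you would rather diagnose the warning than silence it, Hadoop can report which native libraries it is able to load:

# Lists the load status of the hadoop, zlib, snappy, lz4, bzip2 and openssl native libraries
hadoop checknative -a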

After stopping the firewall, confirm that a browser can reach http://<ip>:50070 (the NameNode web UI).

Also confirm that a browser can reach http://<ip>:8088 (the YARN ResourceManager web UI).
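
A sketch of the firewall step, assuming a systemd-based host (this machine uses hostnamectl, so firewalld is the likely default):

# Stop the firewall outright
systemctl stop firewalld
systemctl disable firewalld
# Or keep it running and open just the two web UI ports:
# firewall-cmd --permanent --add-port=50070/tcp --add-port=8088/tcp
# firewall-cmd --reload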
