HBase pseudo-distributed deployment

1, Basic Configuration

# Configure hostname
hostnamectl set-hostname master    # use node1 / node2 on the other two hosts

# Configure hosts
cat <<EOF >> /etc/hosts
192.168.2.124 master
192.168.2.125 node1
192.168.2.126 node2
EOF

# Install the JDK
yum install -y java-1.8.0-openjdk-devel.x86_64
# Configure the Java environment
cat <<EOF | sudo tee /etc/profile.d/hbase-env.sh
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.222.b10-1.el7_7.x86_64
export CLASSPATH=.:\$JAVA_HOME/lib/dt.jar:\$JAVA_HOME/lib/tools.jar
export PATH=\$PATH:\$JAVA_HOME/bin
EOF
source /etc/profile.d/hbase-env.sh
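
A quick sanity check that the JDK and JAVA_HOME are picked up (the version string will reflect whatever OpenJDK build yum actually installed):

java -version
echo $JAVA_HOME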

# New user
adduser hadoop
echo "123456" | passwd --stdin hadoop
usermod -aG wheel hadoop    # allow sudo via the wheel group
su - hadoop

# SSH key-based authentication
ssh-keygen -t rsa -P ""
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 0600 ~/.ssh/authorized_keys
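
Passwordless login should now work; a quick check, assuming sshd is running locally (the first connection may still prompt to accept the host key):

ssh localhost hostname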

2, Hadoop deployment

# Download Hadoop
wget http://mirrors.tuna.tsinghua.edu.cn/apache/hadoop/common/hadoop-3.1.2/hadoop-3.1.2.tar.gz
tar -xzvf hadoop-3.1.2.tar.gz
rm hadoop-3.1.2.tar.gz
sudo mv hadoop-3.1.2/ /usr/local/

# Set environment variables
cat <<EOF | sudo tee /etc/profile.d/hbase-env.sh
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.222.b10-1.el7_7.x86_64
export CLASSPATH=.:\$JAVA_HOME/lib/dt.jar:\$JAVA_HOME/lib/tools.jar
export HADOOP_HOME=/usr/local/hadoop-3.1.2
export HADOOP_HDFS_HOME=\$HADOOP_HOME
export HADOOP_MAPRED_HOME=\$HADOOP_HOME
export YARN_HOME=\$HADOOP_HOME
export HADOOP_COMMON_HOME=\$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=\$HADOOP_HOME/lib/native
export HADOOP_OPTS="-Djava.library.path=\$HADOOP_HOME/lib:\$HADOOP_COMMON_LIB_NATIVE_DIR"
export PATH=\$PATH:\$JAVA_HOME/bin:\$HADOOP_HOME/bin:\$HADOOP_HOME/sbin
EOF
source /etc/profile.d/hbase-env.sh

# View version
hadoop version
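
Optionally, check which native libraries this Hadoop build can load; warnings here are common on a stock CentOS/OpenJDK install and do not block a pseudo-distributed setup:

hadoop checknative -a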


# Edit the configuration file

vi /usr/local/hadoop-3.1.2/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.222.b10-1.el7_7.x86_64
export HADOOP_PID_DIR=/hadoop/pids

vi /usr/local/hadoop-3.1.2/etc/hadoop/core-site.xml
<configuration>
  <property>
      <name>fs.defaultFS</name>
      <value>hdfs://localhost:9000</value>
      <description>The default file system URI</description>
   </property>
</configuration>


sudo mkdir -p /hadoop/hdfs/{namenode,datanode}
sudo chown -R hadoop:hadoop /hadoop

vi /usr/local/hadoop-3.1.2/etc/hadoop/hdfs-site.xml
<configuration>
  <property>
      <name>dfs.replication</name>
      <value>1</value>
   </property>
    
   <property>
      <name>dfs.namenode.name.dir</name>
      <value>file:///hadoop/hdfs/namenode</value>
   </property>
    
   <property>
      <name>dfs.datanode.data.dir</name>
      <value>file:///hadoop/hdfs/datanode</value>
   </property>
   
</configuration>
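
The configuration can be verified before any daemon is started; hdfs getconf reads the XML files on the client side, so a typo in core-site.xml or hdfs-site.xml shows up here immediately:

hdfs getconf -confKey fs.defaultFS
hdfs getconf -confKey dfs.replication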


vi /usr/local/hadoop-3.1.2/etc/hadoop/mapred-site.xml
<configuration>
   <property>
      <name>mapreduce.framework.name</name>
      <value>yarn</value>
   </property>

</configuration>
  

vi /usr/local/hadoop-3.1.2/etc/hadoop/yarn-site.xml
<configuration>
    <property>
      <name>yarn.nodemanager.aux-services</name>
      <value>mapreduce_shuffle</value>
   </property>
</configuration>

# Format the NameNode

hdfs namenode -format
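
If the format succeeds, the metadata directory configured in hdfs-site.xml is initialized; the VERSION file (path follows the namenode directory created above) records the new clusterID:

cat /hadoop/hdfs/namenode/current/VERSION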

# Start / stop Hadoop services
start-dfs.sh
start-yarn.sh

stop-dfs.sh
stop-yarn.sh
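
After start-dfs.sh and start-yarn.sh, all five daemons of a single-node setup should appear in jps:

# Expect NameNode, DataNode, SecondaryNameNode, ResourceManager and NodeManager
jps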

# View NameNode state
http://192.168.2.124:9870
# View ResourceManager state
http://192.168.2.124:8088
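
Optionally, run a small MapReduce job as an end-to-end smoke test of HDFS and YARN; the jar path below assumes the stock Hadoop 3.1.2 layout under HADOOP_HOME:

hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.1.2.jar pi 2 5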

3, HBase deployment
# Download HBase
wget https://mirrors.tuna.tsinghua.edu.cn/apache/hbase/stable/hbase-1.4.10-bin.tar.gz
tar -zxf hbase-1.4.10-bin.tar.gz
sudo mv hbase-1.4.10 /usr/local/

# Set environment variables
cat <<EOF | sudo tee /etc/profile.d/hbase-env.sh
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.222.b10-1.el7_7.x86_64
export CLASSPATH=.:\$JAVA_HOME/lib/dt.jar:\$JAVA_HOME/lib/tools.jar
export HADOOP_HOME=/usr/local/hadoop-3.1.2
export HADOOP_HDFS_HOME=\$HADOOP_HOME
export HADOOP_MAPRED_HOME=\$HADOOP_HOME
export YARN_HOME=\$HADOOP_HOME
export HADOOP_COMMON_HOME=\$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=\$HADOOP_HOME/lib/native
export HADOOP_OPTS="-Djava.library.path=\$HADOOP_HOME/lib:\$HADOOP_COMMON_LIB_NATIVE_DIR"
export ZOOKEEPER_HOME=/usr/local/zookeeper-3.5.5
export HBASE_HOME=/usr/local/hbase-1.4.10
export PATH=\$PATH:\$JAVA_HOME/bin:\$HADOOP_HOME/bin:\$HADOOP_HOME/sbin:\$ZOOKEEPER_HOME/bin:\$HBASE_HOME/bin
EOF
source /etc/profile.d/hbase-env.sh

# Edit the HBase environment file
vi /usr/local/hbase-1.4.10/conf/hbase-env.sh
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.222.b10-1.el7_7.x86_64
#export HBASE_MANAGES_ZK=false
export HBASE_PID_DIR=/hadoop/pids

sudo mkdir -p /hadoop/{zookeeper,pids}
sudo chown -R hadoop:hadoop /hadoop


# Edit the HBase configuration file

vi /usr/local/hbase-1.4.10/conf/hbase-site.xml
<configuration>
   <property>
      <name>hbase.rootdir</name>
      <value>hdfs://localhost:9000/hbase</value>
   </property>
    
   <property>
      <name>hbase.zookeeper.property.dataDir</name>
      <value>/hadoop/zookeeper</value>
   </property>
   
   <property>
     <name>hbase.cluster.distributed</name>
     <value>true</value>
   </property>
</configuration>

# Use the HBase shell

hbase shell
status
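
Once HBase has been started (next step), a minimal read/write check can be done from the same shell; the table name test and column family cf below are arbitrary:

create 'test', 'cf'
put 'test', 'row1', 'cf:a', 'value1'
scan 'test'
disable 'test'
drop 'test'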

# Start / stop services (start Hadoop, then HBase; stop in reverse order)
start-all.sh
start-hbase.sh

stop-hbase.sh
stop-all.sh
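
With HBase left to manage its own ZooKeeper (HBASE_MANAGES_ZK untouched above), three more processes should join the Hadoop daemons, and the hbase.rootdir directory should appear in HDFS:

# Expect HMaster, HRegionServer and HQuorumPeer
jps
hdfs dfs -ls /hbase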

4, Final results

 

Origin www.cnblogs.com/fourw/p/11568561.html