1. Install the MySQL service
# Download and install the MySQL client, server and development headers.
# (Original line was garbled: "mysql mysql mysql- Server- devel".)
yum install -y mysql mysql-server mysql-devel
# Start the MySQL service through its SysV init script
# (use the full path instead of cd-ing into /etc first).
/etc/init.d/mysqld start
# Connect to MySQL with the command-line client and log in
# (a fresh install has an empty root password).
mysql
# Change the password of the MySQL root account (run inside the mysql client).
USE mysql;
UPDATE user SET Password = PASSWORD('password') WHERE User = 'root';
# Allow remote (telnet / non-localhost) root logins from any host ('%').
GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' IDENTIFIED BY 'password' WITH GRANT OPTION;
# Force the in-memory privilege tables to be reloaded so the changes take effect.
FLUSH PRIVILEGES;
2. Configure Hive
# 1. Upload the Hive tarball and unpack it, then:
#    cd /export/servers/hive-1.2.1/conf
# 2. Edit hive-env.sh:
# Set HADOOP_HOME to point to a specific hadoop install directory.
# Exported (like the variables below) so it is visible to the child
# processes spawned by the hive launcher scripts that source this file.
export HADOOP_HOME=/export/servers/hadoop-2.6.0-cdh5.14.0
# Hive Configuration Directory can be controlled by:
export HIVE_CONF_DIR=/export/servers/hive-1.2.1/conf
# Folder containing extra libraries required for hive compilation/execution can be controlled by:
export HIVE_AUX_JARS_PATH=/export/servers/hive-1.2.1/lib
#3.新建hive-site.xml文件
<configuration>
  <!-- JDBC connection to the MySQL metastore on node01
       (the hive database is created automatically on first use) -->
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://node01:3306/hive?createDatabaseIfNotExist=true</value>
    <description>JDBC connect string for a JDBC metastore</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
    <description>Driver class name for a JDBC metastore</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>root</value>
    <description>username to use against metastore database</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>password</value>
    <description>password to use against metastore database</description>
  </property>
  <!-- HiveServer2 thrift endpoint; beeline later connects to node01:10000 -->
  <property>
    <name>hive.server2.thrift.port</name>
    <value>10000</value>
  </property>
  <property>
    <name>hive.server2.thrift.bind.host</name>
    <!-- was the garbled value " amdha01 "; the rest of this document starts
         hiveserver2 on node01 and connects via jdbc:hive2://node01:10000 -->
    <value>node01</value>
  </property>
</configuration>
# 4. Upload the MySQL JDBC driver jar to the /export/servers/hive-1.2.1/lib directory
# 5. Configure the Hive system environment variables
# Append the Hive environment variables to /etc/profile
# (run `source /etc/profile` afterwards to apply them to the current shell).
vim /etc/profile
export HIVE_HOME=/export/servers/hive-1.2.1
# Prepend $HIVE_HOME/bin to PATH. Note: the original garbled line began with
# "PATH=:" — a leading colon adds an empty entry (the current directory) to
# PATH, which is unsafe; it is dropped here.
export PATH=$HIVE_HOME/bin:$PATH
3. Hive remote service
# 1. Distribute the Hive installation from node01 to node02 and node03:
cd /export/servers
scp -r hive-1.2.1 node02:$PWD
scp -r hive-1.2.1 node03:$PWD
# 2. Start the Hadoop cluster on node01.
# 3. Start the hiveserver2 service on node01, then clone the current
#    session and run jps to verify the process is running.
cd /export/servers/hive-1.2.1/bin
hiveserver2
# 4. On node02, use beeline to connect remotely to the Hive server.
cd /export/servers/hive-1.2.1
bin/beeline
# 5. Enter the remote connection URL, giving the hostname and port of the
#    Hive server (default port 10000):
!connect jdbc:hive2://node01:10000
# 6. Enter the username and password for the Hive server.