Importing files into HBase via bulkload (2)

//-------------- Step 0: export the Oracle table to HDFS ------------
bin/sqoop import \
--connect jdbc:oracle:thin:@192.23.0.53:1521/PDBORCL \
--username hljcreditc_user \
--password hljcredit_pwd \
--table T_B_CORE_BDATA \
--target-dir /user/beifeng/sqooporacletohdfscore \
--num-mappers 1 \
--delete-target-dir \
--direct
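
Note that Sqoop writes comma-delimited text by default, while importtsv in step 1 expects tab-separated input, so one side needs an explicit separator. A minimal sketch of the two options (pick one; paths and tables are the same as above, and this flag/property is not in the original post):

// Option A: have Sqoop emit tab-separated files (add this flag to the import above)
--fields-terminated-by '\t'

// Option B: tell importtsv in step 1 that the input is comma-separated (add this property there)
-Dimporttsv.separator=,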

//--------------------- Step 1: convert the files to HFiles. In this example importtsv reads the data files exported above and writes HFiles to the bulk output directory -----------------------
export HBASE_HOME=/opt/modules/hbase-0.98.6-hadoop2
export HADOOP_HOME=/opt/modules/hadoop-2.7.7
HADOOP_CLASSPATH=`${HBASE_HOME}/bin/hbase mapredcp`:${HBASE_HOME}/conf \
        ${HADOOP_HOME}/bin/yarn jar \
${HBASE_HOME}/lib/hbase-server-0.98.6-hadoop2.jar importtsv \
-Dimporttsv.columns=HBASE_ROW_KEY,\
info:ID,\
info:GLBM,\
info:MSG_STATUS,\
info:TEMP1,\
info:UNISCID \
-Dimporttsv.bulk.output=hdfs://master:8020/user/beifeng/hbase/sqooptcoreoutput2 \
t_core \
hdfs://master:8020/user/beifeng/sqooporacletohdfscore
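
importtsv with -Dimporttsv.bulk.output partitions the generated HFiles along the region boundaries of the target table, so t_core must already exist with the info column family before this step runs. A minimal sketch in the HBase shell (the pre-split points are only illustrative, not from the original post):

hbase> create 't_core', 'info'
// or, for a large load, pre-split the table so the HFile-generating reducers are spread across regions:
hbase> create 't_core', 'info', SPLITS => ['2', '4', '6', '8']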

//------------ Step 2: load the HFiles into the HBase table via bulkload ------------
export HBASE_HOME=/opt/modules/hbase-0.98.6-hadoop2
export HADOOP_HOME=/opt/modules/hadoop-2.7.7
HADOOP_CLASSPATH=`${HBASE_HOME}/bin/hbase mapredcp`:${HBASE_HOME}/conf \
        ${HADOOP_HOME}/bin/yarn jar \
${HBASE_HOME}/lib/hbase-server-0.98.6-hadoop2.jar \
completebulkload \
hdfs://master:8020/user/beifeng/hbase/sqooptcoreoutput2 \
t_core
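
completebulkload moves (rather than copies) the finished HFiles into the table's region directories, so run it as a user with write access to both the output path and HBase's data directories, and do not reuse the output directory afterwards. As a sketch, the same step can also be launched through the hbase command instead of yarn jar, assuming the same paths as above:

export HBASE_HOME=/opt/modules/hbase-0.98.6-hadoop2
${HBASE_HOME}/bin/hbase org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles \
hdfs://master:8020/user/beifeng/hbase/sqooptcoreoutput2 \
t_core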
// the data has now been loaded into HBase via bulkload; verify the row count (the table name passed to count should be the table that was just loaded)

hbase> count 't_b_core_bdata', INTERVAL => 100000
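
Besides the total row count, a quick scan of a few rows confirms that the rowkey and the info qualifiers landed as expected; a sketch against the t_core table loaded above:

hbase> scan 't_core', {LIMIT => 5}
hbase> get 't_core', 'some-rowkey'    // 'some-rowkey' is a placeholder, substitute a real key from the scan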
//---------------------------------------------------------------------------------------------------


Reprinted from blog.csdn.net/wanghenghengheng/article/details/89206333