Uploading and downloading data with Hive

----------------------------------------
--read me
--Method 1: suited for tool-based transfers
--Method 2: suited for manual, one-off transfers
----------------------------------------
--##########################################################################################################Method 1
--read me: Method 1 uses a shell tool (Sqoop) to transfer data directly from an Oracle database into the Hive database
-----------------------------------------------------------STEP1
--First, change into the following directory on the hadoop 141 server
[fast@hn-hdp-01 ~]$ cd /home/fast/sqoop-1.4.6/jobs

-----------------------------------------------------------STEP2
--Open the .sh script and configure its parameters
[fast@hn-hdp-01 jobs]$ vim ora2hive.sh
{contents of ora2hive.sh below}
#!/bin/sh

export PATH=$PATH:/home/fast/sqoop-1.4.6/bin

url=jdbc:oracle:thin:@10.92.190.65:1521/fast   # Edit 1: set to the IP address and port of the server that hosts the source table
driver=                                        # leave empty by default; no change needed
#username=fasthndeve                           # Oracle database username (paired with the IP above)
#password='F@sthndeve*123'                     # Oracle database password (paired with the IP above)
#username=FASthn4ga
#password='AF@sthn4g*123'
#username=FASTHNCFG
#password='F@sthncfg*123'
username=fastmdt
password='F@stmdt*123'
table=CFG_CELL_POORCOVERAGE_HIVE               # table name in the Oracle database (must be uppercase)
COLUMNS=SDATE,PROVINCE,CITY,COUNTY,ENODEB_ID,LCRID,CELL_NAME,LONGITUDE,LATITUDE   # columns of the Oracle table (column names must be uppercase)
#table=CFG_GRID10_2                            # default
KEY=LONGITUDE                                  # any numeric column will do as the split key
mapper=10                                      # default
hive_db=fastdo_lte                             # default (engineering-parameter data all lives in this database)
#hive_tb=CFG_SITEINFO_TDLTE_P10                # commented out; ignore
hive_tb=$table                                 # default
sqoop import --connect $url \
  --username $username \
  --password $password \
  --columns $COLUMNS \
  --table $table \
  --hive-import \
  --split-by $KEY \
  --hive-database $hive_db \
  --hive-table $hive_tb \
  --input-null-string '\\N' \
  --input-null-non-string '\\N' \
  -m $mapper
# Optional row filters: to restrict the import, insert a --where line above -m, e.g.
# --where "CITY='XINYANG' AND SDATE>=to_date('20180103','YYYYMMDD')" \
# --where "'2018-04-01'>=substr('sdate',1,10)" \
# (a comment line in the middle of a backslash-continued command breaks it, so keep these examples outside the command)
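
--Optionally, before running the full import, the Oracle connection can be sanity-checked with sqoop eval (a sketch reusing the connection settings from ora2hive.sh; assumes /home/fast/sqoop-1.4.6/bin is on the PATH as exported above):
[fast@hn-hdp-01 jobs]$ sqoop eval --connect jdbc:oracle:thin:@10.92.190.65:1521/fast --username fastmdt --password 'F@stmdt*123' --query "SELECT COUNT(*) FROM CFG_CELL_POORCOVERAGE_HIVE"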


-----------------------------------------------------------STEP3
--Run the script
[fast@hn-hdp-01 jobs]$ sh ./ora2hive.sh
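
--For long imports it can help to capture the Sqoop output in a log file (a sketch; the log file name is arbitrary):
[fast@hn-hdp-01 jobs]$ sh ./ora2hive.sh 2>&1 | tee ora2hive_$(date +%Y%m%d).log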

-----------------------------------------------------------STEP4
--On Hive, check whether the transfer succeeded: inspect the column list, or compare the row count against Oracle
hive> desc <table_name>;
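
--To compare row counts, one sketch is to count on both sides, using the table and database configured in ora2hive.sh:
hive> select count(*) from fastdo_lte.CFG_CELL_POORCOVERAGE_HIVE;
--and on the Oracle side (e.g. in sqlplus as fastmdt):
SQL> SELECT COUNT(*) FROM CFG_CELL_POORCOVERAGE_HIVE;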

#alter table CFG_SITEINFO_TDLTE rename to CFG_SITEINFO_TDLTE_2018xx --change the 2018xx suffix to the month of the data
#create table CFG_SITEINFO_TDLTE as select * from test.CFG_SITEINFO_TDLTE

--##########################################################################################################Method 2
--read me: Method 2 manually loads local data into the Hive database
-----------------------------------------------------------STEP1: first create the table under the fastdo_lte database in Hive
--Note: if the table is partitioned, include the partition clause; if not, simply remove it
drop table TDLTE_MRO_LOCATE_HOUR_20170523;
CREATE TABLE IF NOT EXISTS TDLTE_MRO_LOCATE_HOUR_20170523(
GROUPID STRING,
N1_CELL_ID STRING,
N1_RSRP STRING,
N2_CELL_ID STRING,
N2_RSRP STRING,
N3_CELL_ID STRING,
N3_RSRP STRING,
N4_CELL_ID STRING,
N4_RSRP STRING,
N5_CELL_ID STRING,
N5_RSRP STRING,
N6_CELL_ID STRING,
N6_RSRP STRING,
N7_CELL_ID STRING,
N7_RSRP STRING,
S_CELL_ID STRING,
AOA STRING,
TA STRING,
MROID STRING,
S_RSRP STRING,
MRO_TS STRING,
MRO_MMEUES1APID STRING,
MRO_MMEGROUPID STRING,
MRO_MMECODE STRING,
S_RSRQ STRING,
LTESCSINRUL STRING,
SDATE STRING,
CITY STRING,
obj_timeStamp STRING,
RESERVED1 STRING,
RESERVED2 STRING,
callID STRING,
iMSI STRING,
mro_error STRING,
findncell_v STRING,
sum_v STRING,
N1_PCI STRING,
N1_EARFCN STRING,
N2_PCI STRING,
N2_EARFCN STRING,
N3_PCI STRING,
N3_EARFCN STRING,
N4_PCI STRING,
N4_EARFCN STRING,
N5_PCI STRING,
N5_EARFCN STRING,
N6_PCI STRING,
N6_EARFCN STRING,
N7_PCI STRING,
N7_EARFCN STRING,
SCELL_PCI STRING,
SCELL_EARFCN STRING,
CT_MAXRSRP STRING,
CT_RSRQ STRING,
CT_PCI STRING,
CT_EARFCN STRING,
CM_MAXRSRP STRING,
CM_RSRQ STRING,
CM_PCI STRING,
CM_EARFCN STRING,
CU_MAXRSRP STRING,
CU_RSRQ STRING,
CU_PCI STRING,
CU_EARFCN STRING,
DL_SINR STRING,
sampleX_type STRING,
DL_RATE STRING,
DL_CQI STRING,
reserved3 STRING,
reserved4 STRING,
LONGITUDE STRING,
LATITUDE STRING,
planid STRING,
hight STRING,
err STRING,
confidence_degree STRING
)
PARTITIONED BY (ds STRING,cityid STRING) --partition clause (keep PARTITIONED BY (ds STRING,cityid STRING) if the table needs partitioning; delete it if not)
ROW FORMAT SERDE
'org.apache.hadoop.hive.serde2.OpenCSVSerde'
WITH SERDEPROPERTIES ("separatorChar"=",","quoteChar"="\"");
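
--If the table does not need partitions, the tail of the DDL simply omits the PARTITIONED BY line:
)
ROW FORMAT SERDE
'org.apache.hadoop.hive.serde2.OpenCSVSerde'
WITH SERDEPROPERTIES ("separatorChar"=",","quoteChar"="\"");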


-----------------------------------------------------------STEP2: run the data load command in the Hive environment on 141
--Engineering-parameter data all lives in the fastdo_lte Hive database. If the table is partitioned, keep partition(ds='20170523',cityid='ZHENGZHOU'); if not, delete it
hive> use fastdo_lte;
hive> load data local inpath '/data02/yyl/TDLTE_MRO_LOCATE_HOUR_20170523/*.csv' into table TDLTE_MRO_LOCATE_HOUR_20170523 partition(ds='20170523',cityid='ZHENGZHOU');
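
--For a table created without partitions, the same load command simply drops the partition clause:
hive> load data local inpath '/data02/yyl/TDLTE_MRO_LOCATE_HOUR_20170523/*.csv' into table TDLTE_MRO_LOCATE_HOUR_20170523;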


--Alternatively, an existing HDFS directory can be attached as a partition of an already-created table:
alter table TDLTE_MRO_LOCATE_HOUR_20200202 add partition(cityname="JIYUAN",ds="20190322") location '/do/lte/fast/20190322/';
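
--A quick check that the partition was registered:
hive> show partitions TDLTE_MRO_LOCATE_HOUR_20200202;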

Downloading data from Hive


--step1 Download the fingerprint table (mrxdr_xy; the example below exports test.RPT_CELL_DISTANCE_2) to a directory on the 141 server; run the command inside the Hive CLI
$ hive
hive> insert overwrite local directory '/data08/yyl/' ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' select * from test.RPT_CELL_DISTANCE_2;
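
--The export writes one file per task into /data08/yyl/ (names like 000000_0). As an alternative sketch, hive -e can stream the result into a single file from the shell, converting the tab-delimited CLI output to commas:
[fast@hn-hdp-01 ~]$ hive -e "select * from test.RPT_CELL_DISTANCE_2" | sed 's/\t/,/g' > RPT_CELL_DISTANCE_2.csv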

--step2 Delete the .crc files in the output directory
[root@hn-hdp-01 ~]# cd /data08/yyl/ --change into the directory
[root@hn-hdp-01 yyl]# rm -f .*.crc --remove the hidden .crc checksum files
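
--A quick confirmation that the checksum files are gone:
[root@hn-hdp-01 yyl]# ls -a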


--step3 Merge the output files; Hive names them as six-digit numbers plus a numeric suffix (e.g. 000000_0)


[root@hn-hdp-01 yyl]# cat ./00* > aaa.csv --first merge into a temporary file name
[root@hn-hdp-01 yyl]# iconv -f UTF-8 -t GBK aaa.csv -o RPT_CELL_DISTANCE_2.csv --convert the encoding of aaa.csv and write it out under the real table name
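
--If unsure whether the conversion took effect, the file utility gives a rough check of the encoding (GBK text typically reports as ISO-8859 rather than UTF-8):
[root@hn-hdp-01 yyl]# file RPT_CELL_DISTANCE_2.csv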

--step4 Download the generated .csv file via FTP to the appropriate directory on the 57 server
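
--One sketch for the transfer, using scp instead of FTP; the user, host, and target directory below are placeholders to adjust:
[root@hn-hdp-01 yyl]# scp RPT_CELL_DISTANCE_2.csv <user>@<57-server>:/<target-dir>/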


Reposted from www.cnblogs.com/111fyh/p/10695151.html