Hive CLI

1. Create a managed (internal) table. Hive moves the data into its warehouse directory and manages it, so dropping the table deletes both the metadata and the data.

create table user (id int, name string) row format delimited fields terminated by '\t';
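To confirm that Hive is managing the table, you can inspect its metadata; the exact output depends on the Hive version, but it should report the table type (MANAGED_TABLE) and the warehouse location:
#check table type and location
describe formatted user;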

2. Load data from the local filesystem into the table.

load data local inpath '/root/user.txt' into table user;
#overwrite replaces any data already in the table's directory
load data local inpath '/root/user.txt' overwrite into table user;
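For comparison, dropping the local keyword loads from an HDFS path instead of the local filesystem; in that case the source file is moved into the table's directory rather than copied (the HDFS path below is illustrative):
#load from HDFS; the file is moved, not copied
load data inpath '/data/user.txt' into table user;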

3. Run HDFS commands from inside the Hive CLI.

dfs -ls /;
dfs -mkdir /data;
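Any hadoop fs subcommand can be run this way; for example, uploading a local file into the directory created above (paths are illustrative):
dfs -put /root/user.txt /data/;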

4. Create an external table. The data location must be specified when the table is created; Hive does not manage the data or move it into its warehouse directory, so dropping an external table deletes only the metadata, not the data.
create external table stubak (id int, name string) row format delimited fields terminated by '\t' location '/stubak';
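Because Hive does not own the data, files can be placed directly under the table's location and queried right away, and they survive a drop. A minimal sketch, assuming a tab-delimited local file /root/stubak.txt with matching columns:
#put a file straight into the external table's directory
dfs -put /root/stubak.txt /stubak/;
select * from stubak;
#dropping the table removes only the metadata; /stubak and its files remain
drop table stubak;
dfs -ls /stubak;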

5. Create a partitioned table, partitioned by nation. Each partition is simply a subdirectory, e.g. user/nation=China/user.txt.
#create the partitioned table
create table user(id int, name string) partitioned by (nation string) row format delimited fields terminated by '\t';
#load data into a partition
load data local inpath './user.txt' into table user partition (nation='China');
load data local inpath './user.txt' overwrite into table user partition (nation='China');
#list the table's partitions
show partitions user;
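Filtering on the partition column lets Hive read only the matching subdirectory (partition pruning), and empty partitions can also be added up front (the nation='Japan' value is illustrative):
#only the user/nation=China subdirectory is scanned
select id, name from user where nation='China';
#add an empty partition; creates the nation=Japan subdirectory
alter table user add partition (nation='Japan');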

6. User-defined functions (UDFs)
#add the jar to the classpath
add jar /root/NUDF.jar;
#create a temporary function named getNation
create temporary function getNation as 'hive.udf.NationUDF';
#call it in a query
select id, name, getNation(nation) from beauty;
#save the query results to HDFS by creating a table from the query (CTAS)
create table result row format delimited fields terminated by '\t' as select * from beauty order by id desc;
create table result row format delimited fields terminated by '\t' as select id, getNation(nation) from beauty;
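To double-check the registration and clean up afterwards (a temporary function only exists for the current session), something like the following can be used; the warehouse path shown is the default and may differ in your installation:
#inspect the registered function
describe function getNation;
#the CTAS result lands under the warehouse directory, e.g.
dfs -ls /user/hive/warehouse/result;
#remove the temporary function when done
drop temporary function getNation;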
 

Reprinted from mvplee.iteye.com/blog/2248072