大数据工作中常用的20条语句(hive,yarn,Linux,正则表达式等)

版权声明: https://blog.csdn.net/KamRoseLee/article/details/84242983

1.查看hadoop任务
yarn application -list |grep 用户名
yarn application -kill application_1443009203033_86621

2.linux nohup命令
nohup ./label_driver.sh > /home/disk4/bi/driver_label/label_driver20150325.log  2>&1 &
nohup ./testpass.sh > logs/testpass_20150323.log  2>&1 &

3.hive添加字段
ALTER TABLE label_driver_z add columns(total_miss_cnt int comment '累积爽约订单数');

4.hive修改表分隔方式、修改空值描述符
ALTER TABLE city SET SERDEPROPERTIES ('field.delim' = '\t');
ALTER TABLE city SET SERDEPROPERTIES('serialization.format' = '\t');
ALTER TABLE city SET SERDEPROPERTIES('serialization.null.format' = '');
ALTER TABLE dm_market_acti_reg_d CHANGE dt dt int COMMENT '业务日期' AFTER boci;
ALTER TABLE employee RENAME TO emp;
ALTER TABLE dm_market_boci_first_call_d add COLUMNS (dt int comment '业务日期');

ALTER TABLE dw_customer add COLUMNS (ord_succ_d int comment '当日完成订单数');


5.hive查看函数的用法
desc function extended add_months;

6.hive查看表存储实际路径
desc extended ods_order partition(dt=20151010);

7.hive insert语法
insert OVERWRITE table test_an select 11111111111,'a' from label_driver_basic limit 1;
mysql -uhive -p'xiaojukeji' -hhdp999.qq -P3306 --default-character-set=utf8 test

8.不开启优化
set hive.fetch.task.conversion=minimal,more
set hive.exec.parallel=true;

9.查看使用的分区
explain dependency query_str

10.删除文件
hadoop fs -rmr /user/rd/bi_dm/dm_tag_pass_ord_tot_d;
dfs -rmr /user/rd/bi_dm/app_driver_ord_distri_d;


11.hive选择执行引擎
set hive.execution.engine=tez,spark,mr
set mapred.reduce.tasks=20;

12.手机号码正则表达式
'^1[35847][0-9]{9}$'

13.查找某个文件
find . -name app_tag_pass1_info.sh

14.COALESCE( value1,value2,... ) 返回参数列表中第一个非空(非NULL)的值


17.hive删除分区
ALTER TABLE login DROP IF EXISTS PARTITION (dt='2008-08-08');


19.hive下载目录
http://archive.apache.org/dist/hive/

20.压缩命令
tar -cvf /tmp/etc.tar /etc

21.改变文件目录权限
chown -R xiaoju  /home/disk1/tagol-data/weather/
chgrp -R xiaoju  /home/disk1/tagol-data/weather/

22.启动langley
nohup /data/xiaoju/langley/srv/bin/langley_srv 9001 /data1 &

23.取文本第N列数据
cat test|awk '{print $2}'
awk '{print $1}' dw_pass_tagol_a_20150708.csv > tagol.csv

猜你喜欢

转载自blog.csdn.net/KamRoseLee/article/details/84242983