Common Hive techniques (part two)

Refresh a Hive partitioned table

-- Drop and recreate the table
DROP TABLE asmp.tt_tmp;
create external table if not exists asmp.tt_tmp(
rssc_name	string,
rssc_code	string,
sst_name	string,
update_date string
)
STORED AS parquet
LOCATION '/user/asmp/hive/asmp/tt_tmp';
-- Repair partitions: register partition directories that already exist in HDFS
MSCK REPAIR TABLE asmp.tt_tmp;
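
MSCK REPAIR TABLE only registers directories for tables declared with PARTITIONED BY. A minimal sketch of a partitioned variant, assuming a hypothetical table asmp.tt_tmp_part with a hypothetical partition column partition_date:

create external table if not exists asmp.tt_tmp_part(
rssc_name	string,
rssc_code	string,
sst_name	string,
update_date string
)
PARTITIONED BY (partition_date string)
STORED AS parquet
LOCATION '/user/asmp/hive/asmp/tt_tmp_part';
-- Every directory named partition_date=... under the location is added as a partition
MSCK REPAIR TABLE asmp.tt_tmp_part;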

Quickly drop empty Hive partitions

-- Drop all empty (__HIVE_DEFAULT_PARTITION__) partitions
alter table wd_tt_repair_part drop IF EXISTS PARTITION (partition_rssc='__HIVE_DEFAULT_PARTITION__');
-- Drop the empty partition only under a given month (avoids accidentally deleting historical data)
alter table wd_tt_repair_part drop IF EXISTS PARTITION (partition_date=201906, partition_rssc='__HIVE_DEFAULT_PARTITION__');
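
To see which default partitions would be affected before dropping anything, the partitions can be listed first (table name taken from the example above):

SHOW PARTITIONS wd_tt_repair_part;
-- Hive also accepts a partial partition spec to narrow the listing to one month
SHOW PARTITIONS wd_tt_repair_part PARTITION (partition_date=201906);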

Increase Hive execution memory

-- Container memory (MB) for map and reduce tasks; keep the JVM heap (-Xmx) below the container size
SET mapreduce.map.memory.mb=8192;
SET mapreduce.map.java.opts='-Xmx6552M';
SET mapreduce.reduce.memory.mb=8192;
SET mapreduce.reduce.java.opts='-Xmx6552M';
SET mapreduce.child.map.java.opts='-Xmx6552M';
SET mapred.child.java.opts='-Xmx4096M';

Use dynamic partitions

-- Enable dynamic partitioning (default: false)
set hive.exec.dynamic.partition=true;
-- Dynamic partition mode (default: strict, which requires at least one static partition key)
set hive.exec.dynamic.partition.mode=nonstrict;
-- Raise the limits on the number of dynamic partitions
set hive.exec.max.dynamic.partitions=2000;
set hive.exec.max.dynamic.partitions.pernode=2000;
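
A minimal insert that relies on these settings; the target table tt_repair_dyn is hypothetical, and its partition column is filled from the last column of the SELECT list:

-- partition_date is resolved dynamically from the query result
INSERT OVERWRITE TABLE tt_repair_dyn PARTITION (partition_date)
SELECT rssc_name, rssc_code, sst_name, update_date AS partition_date
FROM asmp.tt_tmp;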

Add the dynamic partition settings to hive-site.xml (required for them to take effect in Spark SQL)

  <property>
    <name>hive.exec.dynamic.partition</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.exec.dynamic.partition.mode</name>
    <value>nonstrict</value>
  </property>
  <property>
    <name>hive.exec.max.dynamic.partitions</name>
    <value>100000</value>
  </property>
  <property>
    <name>hive.exec.max.dynamic.partitions.pernode</name>
    <value>100000</value>
  </property>
  <property>
    <name>hive.exec.max.created.files</name>
    <value>500000</value>
  </property>

Hive parallel execution

-- Run independent stages of a query in parallel, with up to 8 concurrent stages
set hive.exec.parallel=true;
set hive.exec.parallel.thread.number=8;

Select all columns except one in Hive

set hive.support.quoted.identifiers=none;
-- Regex column specification: matches every column except oil_code
select `(oil_code)?+.+` from table_oil;
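
The same regex pattern can exclude several columns at once, for example (column names illustrative):

select `(oil_code|oil_name)?+.+` from table_oil;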

Convert key/value strings into a map

str_to_map(concat_ws(',',collect_set(concat_ws(':',key, cast(value as string)))))
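
Used inside an aggregation, this builds one map per group; a minimal usage sketch, assuming a hypothetical table kv_tbl(id, key, value):

select id,
       str_to_map(concat_ws(',',collect_set(concat_ws(':',key, cast(value as string))))) as kv_map
from kv_tbl
group by id;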

Convert an external table to an internal (managed) table

alter table tbl_name set TBLPROPERTIES('EXTERNAL'='FALSE');
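
The reverse conversion works the same way; setting the property back to TRUE makes the table external again:

alter table tbl_name set TBLPROPERTIES('EXTERNAL'='TRUE');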

Settings for MapReduce to read nested (recursive) directories

set mapreduce.input.fileinputformat.input.dir.recursive=true;
set hive.mapred.supports.subdirectories=true;

Display the current database name and column headers in the CLI

set hive.cli.print.current.db=true;
set hive.cli.print.header=true; 

Control the number of mappers and reducers

-- Mapper settings: split sizes in bytes
set mapred.max.split.size=128000000;
set mapred.min.split.size.per.node=8000000;
set mapred.min.split.size.per.rack=8000000;
-- Reducer settings: bytes processed per reducer and the reducer cap
set hive.exec.reducers.bytes.per.reducer=128000000;
set hive.exec.reducers.max=999;
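
To pin the reducer count directly instead of deriving it from input size, the job-level setting can also be used (the value 50 is illustrative):

set mapred.reduce.tasks=50;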

Origin blog.csdn.net/lhxsir/article/details/99645619