# Compact small files and rewrite the (month='202111', source='anhui_mobile')
# partition of `tb` in place via a dynamic-partition insert-overwrite.
# NOTE(review): queue name "root." looks truncated — confirm the full queue
# name (e.g. root.<pool>) before running.
sql="
set mapred.max.split.size=256000000;
set mapred.min.split.size.per.node=100000000;
set mapred.min.split.size.per.rack=100000000;
set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
set hive.merge.smallfiles.avgsize=512000000;
set hive.merge.mapfiles=true;
set hive.merge.mapredfiles=true;
set hive.merge.size.per.task=512000000;
set hive.support.quoted.identifiers=None;
-- fixed typo: property was misspelled 'hive.exec.dynamici.partition'
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=1000;
set hive.exec.max.dynamic.partitions=10000;
set mapreduce.map.memory.mb=8192;
-- fixed quoting: '-XX:+UseG1GC' was outside the quotes and a stray ';;' followed
set mapreduce.map.java.opts='-Xmx8192M -XX:+UseG1GC';
set mapreduce.reduce.memory.mb=8192;
set mapreduce.reduce.java.opts='-Xmx8192M';
set mapred.job.queue.name=root.;  -- TODO: truncated queue name — fill in
-- fixed missing whitespace: was 'wheremonth=...andsource=...'
insert overwrite table tb partition(month,source)
select * from tb where month='202111' and source='anhui_mobile';
"
hive -e "$sql"
# paste artifact (stray line numbers from the original source) — kept as a comment:
# 2
# 1 2 3 4 5 6 7 8
# Rewrite the (month='202101', source='unicom') partition of `tb` via a
# dynamic-partition insert-overwrite. `distribute by month,source` routes each
# target partition's rows to a single reducer, limiting small-file output.
# NOTE(review): queue name "root." looks truncated — confirm the full queue name.
hive -e "
set mapred.job.queue.name=root.;  -- TODO: truncated queue name — fill in
set hive.exec.dynamic.partition.mode=nonstrict;
-- fixed missing whitespace: was 'wheremonth=...andsource=... distributebymonth,source'
insert overwrite table tb partition(month,source)
select * from tb where month='202101' and source='unicom'
distribute by month,source;
"
# 能否参与评论，且看个人手段。 (Whether one may join the discussion depends on one's own skill.)