Method 1: export the MySQL data to a local file, then load that file into the Hive table
tmp_channel_hive_file="/tmp/tmp_channel_hive_file"          # raw export from MySQL
tmp_channel_hive_file_new="/tmp/tmp_channel_hive_file_new"  # cleaned file that will be loaded into Hive
db="t_merchant_shop_channel_top_daily_new"                  # source MySQL table
hive_db="tmp.tmp_test_table"                                # target Hive table (database.table)
# export yesterday's rows from MySQL; $yesday is assumed to be set earlier to the target date (e.g. 2016-01-01)
mysqlhelper -s dbname -e "select id,shop_id,plat,first_channel,second_channel,pv,uv,gmv,amount,paid_uv,paid_order,dt from $db where dt = '$yesday'" > $tmp_channel_hive_file
# re-print each row's columns tab-separated and write the result to the new file
awk -F"\t" '{for(i=1;i<NF;i++){printf("%s\t",$i);}printf("%s\n",$NF)}' $tmp_channel_hive_file > $tmp_channel_hive_file_new
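If the in-house mysqlhelper wrapper is not available, the stock mysql client can produce the same tab-separated dump. A sketch where <mysql_host>, <mysql_user> and <mysql_password> are placeholders; -N drops the header row and -B gives tab-separated batch output:

mysql -h <mysql_host> -u <mysql_user> -p<mysql_password> -N -B -e "select id,shop_id,plat,first_channel,second_channel,pv,uv,gmv,amount,paid_uv,paid_order,dt from $db where dt = '$yesday'" dbname > $tmp_channel_hive_file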
Load the local file into the Hive table:
hive -e "load data local inpath ‘${tmp_channel_hive_file_new}‘ overwrite into table $hive_db PARTITION (dt=‘$yesday‘)"
Method 2: read from an existing Hive table and insert directly
hive -e "insert overwrite table tmp.tmp_test_table PARTITION (dt=‘$yesday‘)
select id,shop_id,plat,first_channel,second_channel,pv,uv,gmv,amount,paid_uv,paid_order,dt from hive_table where dt = ‘$yesday‘
"
Check whether the write succeeded:
hadoop fs -ls /user/hive/warehouse/hive_table/dt=2016-01*
or
hive -e "show partitions hive_table"
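A row count against the freshly written partition is another quick sanity check, reusing the same $yesday variable:

hive -e "select count(*) from tmp.tmp_test_table where dt = '$yesday'"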