Exporting a Hive table to MySQL with Sqoop 1
阿新 · Published 2019-01-30
#!/bin/sh
# JDBC connection string for the target MySQL database
srcConnect="jdbc:mysql://10.2.1.1:3306/test"
# HDFS directory used as a temporary staging area for the exported data
tempTabPath=/user/test
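# Hive query used for the export. NVL() substitutes defaults for NULLs so the
# plain-text files contain no \N markers that sqoop export would otherwise have
# to handle (e.g. via --input-null-string / --input-null-non-string).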
sql="select NVL(rowkey,'') as rowkey,
NVL(projid,'') as projid,
NVL(devid,'') as devid,
NVL(barcode,'') as barcode,
NVL(devaddr,'') as devaddr,
NVL(runmode_mb,'') as runmode_mb,
NVL(starttime,TIMESTAMP('1971-01-01 00:30:00')) as starttime,
NVL(endttime,TIMESTAMP('1971-01-01 00:30:00')) as endttime,
NVL(receivetime,TIMESTAMP('1971-01-01 00:30:00')) as receivetime
from test"
echo "++++++++++++++++++++開始匯入資料:++++++++++++++++++++++++++++++++"
# Dump the Hive table to the HDFS staging directory
hive -e "
use default;
insert overwrite directory '${tempTabPath}' row format delimited fields terminated by '\t' ${sql};
"
# Use Sqoop to export the staged data from HDFS into the MySQL cluster
sqoop export \
--connect ${srcConnect} \
--username root \
--password 1234qwer \
--table speed_test \
--export-dir ${tempTabPath} \
--input-fields-terminated-by '\t' \
-m 5
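# Illustrative addition (an assumption, not in the original script): stop here if
# the export failed, keeping the staging directory on HDFS for troubleshooting.
if [ $? -ne 0 ]; then
    echo "sqoop export failed, keeping ${tempTabPath} for inspection"
    exit 1
fi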
echo "++++++++++++++++++++結束匯入資料:++++++++++++++++++++++++++++++++"
hadoop fs -rm -r ${tempTabPath}
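After the script finishes, a quick row count confirms that the data landed in MySQL. A minimal sketch, assuming the mysql command-line client is available and reusing the host and credentials from the script above:

mysql -h 10.2.1.1 -u root -p1234qwer -e "SELECT COUNT(*) FROM test.speed_test"

The count should match the number of rows returned by the Hive query.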
Before running the script, create a table in MySQL with the same structure as the Hive query's output; otherwise the export will fail. A sketch of such a table is shown below.
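A minimal sketch of the target table, assuming VARCHAR columns for the string fields and DATETIME for the time fields (adjust lengths and types to match the real data):

CREATE TABLE speed_test (
    rowkey      VARCHAR(64),
    projid      VARCHAR(64),
    devid       VARCHAR(64),
    barcode     VARCHAR(64),
    devaddr     VARCHAR(64),
    runmode_mb  VARCHAR(64),
    starttime   DATETIME,
    endttime    DATETIME,
    receivetime DATETIME
);

Because the staged files are plain delimited text with no header, sqoop export maps fields to the table's columns by position, so the column order here must match the column order in the Hive query.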