Transferring data from a relational database to Hive with Sqoop

Time: 2023-03-08 17:29:48

Shell script

#!/bin/sh
# Columns to pull from the Oracle view
v_columns=NOTE_ID_1,NOTE_NAME_1,NOTE_ID_2,NOTE_NAME_2,NOTE_ID_3,NOTE_NAME_3,NOTE_ID_4,NOTE_NAME_4,NOTE_ID_5,NOTE_NAME_5,NOTE_ID_6,NOTE_NAME_6,TYPE_VALUES,NOTE_NAME

# Import from Oracle into HDFS: append mode, one mapper, tab-delimited fields
sqoop import --append \
  --connect jdbc:oracle:thin:@132.232.19.119:2527:szdw --username ******* --password ******* \
  --table ETL_BSS.VIEW_LOC_NOTE_YDYW_HDGB --columns ${v_columns} \
  --target-dir '/home/hadoop/data/etl_bss/view_loc_note_ydyw_hdgb' \
  --split-by NOTE_ID_1 -m 1 --fields-terminated-by '\t'
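
Before creating the Hive table it is worth confirming that the import actually produced tab-delimited part files in the target directory. A minimal check, assuming the hdfs client is available on the same host and that Sqoop used its usual part-m-* file naming:

# List the imported part files
hdfs dfs -ls /home/hadoop/data/etl_bss/view_loc_note_ydyw_hdgb
# Peek at the first few rows to confirm the '\t' field delimiter
hdfs dfs -cat '/home/hadoop/data/etl_bss/view_loc_note_ydyw_hdgb/part-m-*' | head -n 5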

Hive table creation

CREATE TABLE VIEW_LOC_NOTE_YDYW_HDGB (
NOTE_ID_1 INT,
NOTE_NAME_1 STRING,
NOTE_ID_2 INT,
NOTE_NAME_2 STRING,
NOTE_ID_3 INT,
NOTE_NAME_3 STRING,
NOTE_ID_4 INT,
NOTE_NAME_4 STRING,
NOTE_ID_5 INT,
NOTE_NAME_5 STRING,
NOTE_ID_6 INT,
NOTE_NAME_6 STRING,
TYPE_VALUES INT,
NOTE_NAME STRING
)
COMMENT "THIS IS A VIEW_LOC_NOTE_YDYW_HDGB"
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
STORED AS TEXTFILE;
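
The '\t' in FIELDS TERMINATED BY must match the --fields-terminated-by used by the Sqoop job above; if they differ, the columns load as NULL. As a rough sanity check (the hive CLI is assumed here; use beeline if that is what the cluster provides), the stored delimiter and table location can be inspected after creating the table:

# Show the table's storage properties, including field.delim and its warehouse location
hive -e "DESCRIBE FORMATTED VIEW_LOC_NOTE_YDYW_HDGB;" | grep -iE 'field.delim|location'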

Load the data

LOAD DATA INPATH '/home/hadoop/data/etl_bss/view_loc_note_ydyw_hdgb' INTO TABLE VIEW_LOC_NOTE_YDYW_HDGB;
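
Note that LOAD DATA INPATH moves the files from the HDFS source directory into the table's warehouse directory rather than copying them, so the target-dir will be empty afterwards; the next run of the Sqoop job with --append simply writes new part files there. A quick sketch of a post-load check, again assuming the hive CLI is available:

# Row count and a few sample rows to verify the load
hive -e "SELECT COUNT(*) FROM VIEW_LOC_NOTE_YDYW_HDGB;"
hive -e "SELECT * FROM VIEW_LOC_NOTE_YDYW_HDGB LIMIT 5;"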