Using DataX to import data from Hive into ClickHouse
This post shows how to use DataX to copy a Hive table into ClickHouse. The reader side is hdfsreader, which reads the table's comma-delimited text files directly from the Hive warehouse directory on HDFS; the hadoopConfig block carries the HDFS HA settings (the nameservice, its two NameNodes, and the failover proxy provider) so the reader can locate the active NameNode. The writer side is clickhousewriter, which inserts the rows into ClickHouse over JDBC. The speed.byte settings cap the transfer rate at 10 MB/s (10485760 bytes per second), errorLimit.record = 0 fails the job on the first dirty record, and fileType, fieldDelimiter, and nullFormat must match how the Hive table was stored: plain text files, comma-separated fields, and \N for NULL (Hive's representation of NULL in text files). The full job configuration:
{
    "core": {
        "transport": {
            "channel": {
                "speed": {
                    "byte": 10485760
                }
            }
        }
    },
    "job": {
        "setting": {
            "speed": {
                "byte": 10485760
            },
            "errorLimit": {
                "record": 0,
                "percentage": 0.02
            }
        },
        "content": [
            {
                "reader": {
                    "name": "hdfsreader",
                    "parameter": {
                        "hadoopConfig": {
                            "dfs.nameservices": "tqHadoopCluster",
                            "dfs.ha.namenodes.tqHadoopCluster": "nn1,nn2",
                            "dfs.namenode.rpc-address.tqHadoopCluster.nn1": "xxx.xxx.xxx.xxx:8020",
                            "dfs.namenode.rpc-address.tqHadoopCluster.nn2": "xxx.xxx.xxx.xxx:8020",
                            "dfs.client.failover.proxy.provider.tqHadoopCluster": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
                        },
                        "defaultFS": "hdfs://xxxxx",
                        "path": "/xxxxx_3/hiveWarehouse/population_tag_test",
                        "column": [
                            { "index": 0, "type": "string" },
                            { "index": 1, "type": "string" },
                            { "index": 2, "type": "string" },
                            { "index": 3, "type": "string" },
                            { "index": 4, "type": "string" },
                            { "index": 5, "type": "string" },
                            { "index": 6, "type": "string" }
                        ],
                        "fileType": "text",
                        "encoding": "UTF-8",
                        "fieldDelimiter": ",",
                        "nullFormat": "\\N"
                    }
                },
                "writer": {
                    "name": "clickhousewriter",
                    "parameter": {
                        "username": "default",
                        "password": "tianquekeji",
                        "column": ["one_idcardnumber", "one_name", "one_sex", "one_father", "one_mother", "one_landlord", "one_disable_guardian"],
                        "connection": [
                            {
                                "jdbcUrl": "jdbc:clickhouse://xxxxxx:8123/test",
                                "table": ["population_tag"]
                            }
                        ]
                    }
                }
            }
        ]
    }
}
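clickhousewriter does not create the target table, so test.population_tag must exist before the job runs, with columns matching the writer's column list. Below is a minimal sketch of such a table; the original post only names the columns, so the String types, the MergeTree engine, and the ORDER BY key are all assumptions, not taken from the source. Note that clickhouse-client connects over ClickHouse's native port (9000 by default), not the HTTP port 8123 that the JDBC URL uses.

# hypothetical DDL: column types, engine, and sort key are assumed
clickhouse-client --host xxxxxx --user default --password tianquekeji --query "
CREATE TABLE IF NOT EXISTS test.population_tag
(
    one_idcardnumber     String,
    one_name             String,
    one_sex              String,
    one_father           String,
    one_mother           String,
    one_landlord         String,
    one_disable_guardian String
)
ENGINE = MergeTree
ORDER BY one_idcardnumber"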
Save the configuration as a .json file and submit it with DataX's Python launcher:

python $DATAX_HOME/bin/datax.py xxx.json
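When the job finishes, DataX prints a summary with the total number of records read and written. Since errorLimit.record is 0, a run that completes should leave identical row counts on both sides. A quick way to confirm, assuming the Hive table is named after the last segment of the HDFS path above (an assumption, since the post never shows the Hive table name):

# rows in the source Hive table (table name assumed from the warehouse path)
hive -e "SELECT COUNT(*) FROM population_tag_test"

# rows that landed in ClickHouse
clickhouse-client --host xxxxxx --user default --password tianquekeji \
    --query "SELECT count() FROM test.population_tag"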