环境:CDH 5.12.1 版本,MySQL 5.7

1、mysql表结构

datax将mysql数据导入hive表

2、MySQL 表数据(user)

datax将mysql数据导入hive表

3、下载 DataX

wget http://datax-opensource.oss-cn-hangzhou.aliyuncs.com/datax.tar.gz

4、在datax的job目录编写一个mysql2hive.json文件

a) 下面是全量导入

{
    "job": {
        "content": [
            {
                "reader": {
                    "name": "mysqlreader",
                    "parameter": {
                        "column": [
                            "id",
                            "name",
                            "age",
                            "create_time"
                        ],
                        "connection": [
                            {
                                "jdbcUrl": [
                                    "jdbc:mysql://192.168.75.101:3306/test"
                                ],
                                "table": [
                                    "user"
                                ]
                            }
                        ],
                        "password": "yang156122",
                        "username": "root",
                        "where": ""
                    }
                },
                "writer": {
                    "name": "hdfswriter",
                    "parameter": {
                        "column": [
                            {
                                "name": "id",
                                "type": "INT"
                            },
                            {
                                "name": "name",
                                "type": "STRING"
                            },
                            {
                                "name": "age",
                                "type": "INT"
                            },
                            {
                                "name": "create_time",
                                "type": "TIMESTAMP"
                            }
                        ],
                        "compress": "gzip",
                        "defaultFS": "hdfs://192.168.75.101:8020",
                        "fieldDelimiter": "\t",
                        "fileName": "user",
                        "fileType": "text",
                        "path": "/user/datax/data/ceshi",
                        "writeMode": "append"
                    }
                }
            }
        ],
        "setting": {
            "speed": {
                "channel": 1
            }
        }
    }
}
View Code

相关文章: