Incremental Replication with DataX

Overview

This article walks through copying a MySQL table with DataX: first a full load, then an incremental load driven by a create_time window.

1. Environment preparation

Source IP:       192.168.56.199
Source database: testdb
Source table:    t1
Target IP:       192.168.56.199
Target database: testdb1
Target table:    t1
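Both the source and target schemas live on the same MySQL instance in this walkthrough. Before configuring DataX, a quick connectivity check from the DataX host can rule out network and credential problems early; the mysql client invocation below is just an illustrative sketch:

# Verify the instance is reachable and the credentials work
mysql -h 192.168.56.199 -P 3306 -uroot -p -e "SELECT VERSION();"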

2. Driver package preparation

Since the MySQL version is 8.0.20, the matching JDBC driver package must be downloaded.
The driver package is mysql-connector-java-8.0.20.jar.
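DataX loads the JDBC driver from each plugin's libs directory, so the 8.x jar needs to be placed where the mysqlreader and mysqlwriter plugins pick up their libraries. A sketch, assuming DataX is unpacked under /opt/datax (the path is an assumption; adjust it to your installation):

# Install the 8.x driver for both MySQL plugins
# (/opt/datax is an assumed install location)
cp mysql-connector-java-8.0.20.jar /opt/datax/plugin/reader/mysqlreader/libs/
cp mysql-connector-java-8.0.20.jar /opt/datax/plugin/writer/mysqlwriter/libs/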

3. Create the database and table on the source, and insert seed data

CREATE DATABASE testdb;
USE testdb;
CREATE TABLE `t1` (
  `id` int(11) DEFAULT NULL,
  `name` varchar(100) DEFAULT NULL,
  `deg` varchar(100) DEFAULT NULL,
  `salary` int(11) DEFAULT NULL,
  `dept` varchar(10) DEFAULT NULL,
  `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  `is_delete` bigint(20) DEFAULT '1'
) ENGINE=InnoDB;
INSERT INTO `t1` (`id`,`name`,`deg`,`salary`,`dept`,`create_time`,`update_time`,`is_delete`) VALUES
  (1201,'gopal','manager',50000,'TP','2018-06-17 18:54:32','2019-01-17 11:19:32',1),
  (1202,'manishahello','Proof reader',50000,'TPP','2018-06-15 18:54:32','2018-06-17 18:54:32',0),
  (1203,'khalillskjds','php dev',30000,'AC','2018-06-17 18:54:32','2019-03-14 09:18:27',1),
  (1204,'prasanth_xxx','php dev',30000,'AC','2018-06-17 18:54:32','2019-04-07 09:09:24',1),
  (1205,'kranthixxx','admin',20000,'TP','2018-06-17 18:54:32','2018-12-08 11:50:33',0),
  (1206,'garry','manager',50000,'TPC','2018-12-10 21:41:09','2018-12-10 21:41:09',1),
  (1207,'oliver','php dev',2000,'AC','2018-12-15 13:49:13','2018-12-15 13:49:13',1),
  (1208,'hello','phpDev',200,'TP','2018-12-16 09:41:48','2018-12-16 09:41:48',1),
  (1209,'ABC','HELLO',300,NULL,'2018-12-16 09:42:04','2018-12-16 09:42:24',1),
  (1210,'HELLO','HELLO',5800,'TP','2019-01-24 09:02:43','2019-01-24 09:02:43',1),
  (1211,'WORLD','TEST',8800,'AC','2019-01-24 09:03:15','2019-01-24 09:03:15',1),
  (1212,'sdfs','sdfsdf',8500,'AC','2019-03-13 22:01:38','2019-03-13 22:01:38',1),
  (1213,NULL,'sdfsdf',9800,'sdfsdf','2019-03-14 09:08:31','2019-03-14 09:08:54',1),
  (1214,'xxx','sdfsdf',9500,NULL,'2019-03-14 09:13:32','2019-03-14 09:13:44',0),
  (1215,'sdfsf','sdfsdfsdf',9870,'TP','2019-04-07 09:10:39','2019-04-07 09:11:18',0),
  (1216,'hello','HELLO',5600,'AC','2019-04-07 09:37:05','2019-04-07 09:37:05',1),
  (1217,'HELLO2','hello2',7800,'TP','2019-04-07 09:37:40','2019-04-07 09:38:17',1);
mysql> SELECT * FROM t1;
+------+--------------+--------------+--------+--------+---------------------+---------------------+-----------+
| id   | name         | deg          | salary | dept   | create_time         | update_time         | is_delete |
+------+--------------+--------------+--------+--------+---------------------+---------------------+-----------+
| 1201 | gopal        | manager      |  50000 | TP     | 2018-06-17 18:54:32 | 2019-01-17 11:19:32 |         1 |
| 1202 | manishahello | Proof reader |  50000 | TPP    | 2018-06-15 18:54:32 | 2018-06-17 18:54:32 |         0 |
| 1203 | khalillskjds | php dev      |  30000 | AC     | 2018-06-17 18:54:32 | 2019-03-14 09:18:27 |         1 |
| 1204 | prasanth_xxx | php dev      |  30000 | AC     | 2018-06-17 18:54:32 | 2019-04-07 09:09:24 |         1 |
| 1205 | kranthixxx   | admin        |  20000 | TP     | 2018-06-17 18:54:32 | 2018-12-08 11:50:33 |         0 |
| 1206 | garry        | manager      |  50000 | TPC    | 2018-12-10 21:41:09 | 2018-12-10 21:41:09 |         1 |
| 1207 | oliver       | php dev      |   2000 | AC     | 2018-12-15 13:49:13 | 2018-12-15 13:49:13 |         1 |
| 1208 | hello        | phpDev       |    200 | TP     | 2018-12-16 09:41:48 | 2018-12-16 09:41:48 |         1 |
| 1209 | ABC          | HELLO        |    300 | NULL   | 2018-12-16 09:42:04 | 2018-12-16 09:42:24 |         1 |
| 1210 | HELLO        | HELLO        |   5800 | TP     | 2019-01-24 09:02:43 | 2019-01-24 09:02:43 |         1 |
| 1211 | WORLD        | TEST         |   8800 | AC     | 2019-01-24 09:03:15 | 2019-01-24 09:03:15 |         1 |
| 1212 | sdfs         | sdfsdf       |   8500 | AC     | 2019-03-13 22:01:38 | 2019-03-13 22:01:38 |         1 |
| 1213 | NULL         | sdfsdf       |   9800 | sdfsdf | 2019-03-14 09:08:31 | 2019-03-14 09:08:54 |         1 |
| 1214 | xxx          | sdfsdf       |   9500 | NULL   | 2019-03-14 09:13:32 | 2019-03-14 09:13:44 |         0 |
| 1215 | sdfsf        | sdfsdfsdf    |   9870 | TP     | 2019-04-07 09:10:39 | 2019-04-07 09:11:18 |         0 |
| 1216 | hello        | HELLO        |   5600 | AC     | 2019-04-07 09:37:05 | 2019-04-07 09:37:05 |         1 |
| 1217 | HELLO2       | hello2       |   7800 | TP     | 2019-04-07 09:37:40 | 2019-04-07 09:38:17 |         1 |
+------+--------------+--------------+--------+--------+---------------------+---------------------+-----------+
17 rows in set (0.00 sec)

4. Create the database and table on the target

CREATE DATABASE testdb1;
USE testdb1;
CREATE TABLE `t1` (
  `id` int(11) DEFAULT NULL,
  `name` varchar(100) DEFAULT NULL,
  `deg` varchar(100) DEFAULT NULL,
  `salary` int(11) DEFAULT NULL,
  `dept` varchar(10) DEFAULT NULL,
  `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  `is_delete` bigint(20) DEFAULT '1'
) ENGINE=InnoDB;

5. Create the mysql2mysql JSON job (full synchronization)

Note that the reader's querySql selects only six of the eight columns, so dept and is_delete are left at their defaults in the target table. The writer's preSql clears the target table first, so rerunning the job does not duplicate rows.

[root@db01 job]# cat mysql2mysql.json
{
  "job": {
    "setting": {
      "speed": {
        "channel": 1
      }
    },
    "content": [
      {
        "reader": {
          "name": "mysqlreader",
          "parameter": {
            "username": "root",
            "password": "wwwwww",
            "connection": [
              {
                "querySql": [
                  "select id,name,deg,salary,create_time,update_time from t1;"
                ],
                "jdbcUrl": [
                  "jdbc:mysql://192.168.56.199:3306/testdb"
                ]
              }
            ]
          }
        },
        "writer": {
          "name": "mysqlwriter",
          "parameter": {
            "writeMode": "insert",
            "username": "root",
            "password": "wwwwww",
            "column": [
              "id",
              "name",
              "deg",
              "salary",
              "create_time",
              "update_time"
            ],
            "session": [
              "set session sql_mode='ANSI'"
            ],
            "preSql": [
              "delete from t1"
            ],
            "connection": [
              {
                "jdbcUrl": "jdbc:mysql://192.168.56.199:3306/testdb1?useUnicode=true&characterEncoding=utf-8",
                "table": [
                  "t1"
                ]
              }
            ]
          }
        }
      }
    ]
  }
}

6. Run the following command to complete the full import

[root@db01 bin]# python datax.py ../job/mysql2mysql.json
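If the job finishes without errors, the row counts on the source and target should match (17 rows at this point). A quick sanity check, assuming the mysql client is available on the DataX host:

# Both counts should be identical after the full load
mysql -h 192.168.56.199 -uroot -p -e "SELECT COUNT(*) FROM testdb.t1; SELECT COUNT(*) FROM testdb1.t1;"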

7. Synchronize incremental data

Insert a new row into the source table:
mysql> INSERT INTO `t1` (`id`,`name`,`deg`,`salary`,`dept`,`create_time`,`update_time`,`is_delete`)
    -> VALUES (12012,'gopal11','manager11',500000,'TP',CURRENT_TIMESTAMP,CURRENT_TIMESTAMP,1);
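Since the incremental job below filters on create_time, you can preview which rows a given window will pick up before running it. An illustrative check for rows created today:

# Only the freshly inserted row (id 12012) should appear
mysql -h 192.168.56.199 -uroot -p -e "SELECT id, name, create_time FROM testdb.t1 WHERE create_time > CURDATE();"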

8. Prepare the incremental job on the DataX host

The incremental job reuses the same mysql2mysql.json file. Compared with the full-load job, the querySql gains a WHERE clause on create_time with ${start_time} and ${end_time} placeholders (filled in at run time via -p), and the preSql that cleared the target table is removed so existing rows are kept:

[root@db01 bin]# cat ../job/mysql2mysql.json
{
  "job": {
    "setting": {
      "speed": {
        "channel": 1
      }
    },
    "content": [
      {
        "reader": {
          "name": "mysqlreader",
          "parameter": {
            "username": "root",
            "password": "wwwwww",
            "connection": [
              {
                "querySql": [
                  "select id,name,deg,salary,create_time,update_time from t1 where create_time > '${start_time}' and create_time < '${end_time}';"
                ],
                "jdbcUrl": [
                  "jdbc:mysql://192.168.56.199:3306/testdb"
                ]
              }
            ]
          }
        },
        "writer": {
          "name": "mysqlwriter",
          "parameter": {
            "writeMode": "insert",
            "username": "root",
            "password": "wwwwww",
            "column": [
              "id",
              "name",
              "deg",
              "salary",
              "create_time",
              "update_time"
            ],
            "session": [
              "set session sql_mode='ANSI'"
            ],
            "connection": [
              {
                "jdbcUrl": "jdbc:mysql://192.168.56.199:3306/testdb1?useUnicode=true&characterEncoding=utf-8",
                "table": [
                  "t1"
                ]
              }
            ]
          }
        }
      }
    ]
  }
}

9. Run the command to complete the incremental synchronization

[root@db01 bin]# python datax.py ../job/mysql2mysql.json -p "-Dstart_time='2021-07-22 00:00:00' -Dend_time='2021-07-22 23:59:59'"
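Typed-out timestamps work for a one-off run, but a scheduled job usually computes the window. A minimal sketch of a daily wrapper, assuming DataX is installed under /opt/datax and GNU date is available (both are assumptions; adjust to your environment):

#!/bin/bash
# Incrementally sync rows whose create_time falls within the previous day.
# /opt/datax is an assumed install path.
start_time=$(date -d "yesterday" +"%Y-%m-%d 00:00:00")
end_time=$(date -d "yesterday" +"%Y-%m-%d 23:59:59")
python /opt/datax/bin/datax.py /opt/datax/job/mysql2mysql.json \
  -p "-Dstart_time='${start_time}' -Dend_time='${end_time}'"

Note that filtering on create_time captures only newly inserted rows; updates to existing rows are not replayed. Filtering on update_time instead, together with a primary key on the target and the writer's "replace" writeMode, is a common way to pick up updates as well.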
