目录
前言
- 项目模块
BinlogMiddleware
1、binlog中间件,负责解析binlog,把变动的数据以json形式发送到kafka队列。
KafkaMiddleware
2、kafka中间件,负责消费kafka队列中的Message,把数据写入Elasticsearch中。
- 基础服务
(1)Mysql
(2)Kafka(用于存放mysql变动消息,存放于Kafka队列)
(3)Elasticsearch
- 项目源码
简介:
BinlogMiddleware服务主要负责监听Binlog日志，并将其发送到Kafka队列（即Kafka生产者）。
本示例模拟监听teemoliu数据库的user、role表。为了方便表结构设计的很简单,均只含有id、name两个属性。
中间件写进Kafka队列的消息格式如下:
{"event":"teemoliu.user.update","value":[1,"TeemoLiu"]}
{"event":"teemoliu.role.insert","value":[1,"管理员"]}
项目结构如下:
![](https://img.php1.cn/3cd4a/1eebe/cd5/7cccb7e4b6cb5cb8.webp)
1、创建SpringBoot项目。
2、导入maven引用。
<dependency>
    <groupId>com.github.shyiko</groupId>
    <artifactId>mysql-binlog-connector-java</artifactId>
    <version>0.16.1</version>
</dependency>
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>fastjson</artifactId>
    <version>1.2.49</version>
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>1.1.1</version>
</dependency>
3、配置文件如下：
# 停用服务端口
spring.main.web-environment=false
# binlog配置
server.id=1
binlog.host=localhost
binlog.port=3306
binlog.user=root
binlog.password=root
# 指定监听的表格
binlog.database.table=teemoliu.user,teemoliu.role
# kafka
spring.kafka.bootstrap-servers=localhost:9092
kafka.topic=binlog
kafka.partNum=3
kafka.repeatNum=1
4、创建Binlog数据传输对象
/**
 * Data transfer object for a single binlog row event, serialized to JSON
 * before being sent to Kafka, e.g. {"event":"teemoliu.user.update","value":[1,"TeemoLiu"]}.
 */
public class BinlogDto {
    // Event key in "database.table.operation" form, e.g. "teemoliu.user.update"
    private String event;
    // Row payload: typically a Serializable[] of column values
    private Object value;

    public BinlogDto(String event, Object value) {
        this.event = event;
        this.value = value;
    }

    // No-arg constructor required by JSON (de)serialization frameworks
    public BinlogDto() {
    }

    public String getEvent() {
        return event;
    }

    public void setEvent(String event) {
        this.event = event;
    }

    public Object getValue() {
        return value;
    }

    public void setValue(Object value) {
        this.value = value;
    }
}
5、创建Kafka数据传输对象
/**
 * Data transfer object for a message consumed from / produced to Kafka.
 * NOTE(review): uses legacy java.util.Date in the public interface; kept
 * for caller compatibility (java.time would be preferred in new code).
 */
public class Message {
    // Message identifier
    private Long id;
    // Message body (the JSON produced by the binlog middleware)
    private String msg;
    // Time the message was sent
    private Date sendTime;

    public Message(Long id, String msg, Date sendTime) {
        this.id = id;
        this.msg = msg;
        this.sendTime = sendTime;
    }

    // No-arg constructor required by JSON (de)serialization frameworks
    public Message() {
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public Date getSendTime() {
        return sendTime;
    }

    public void setSendTime(Date sendTime) {
        this.sendTime = sendTime;
    }
}
6、binlog监听BinlogClientRunner
&#64;Component
public class BinlogClientRunner implements CommandLineRunner {
&#64;Value("${binlog.host}")
private String host;
&#64;Value("${binlog.port}")
private int port;
&#64;Value("${binlog.user}")
private String user;
&#64;Value("${binlog.password}")
private String password;
// binlog server_id
&#64;Value("${server.id}")
private long serverId;
// kafka话题
&#64;Value("${kafka.topic}")
private String topic;
// kafka分区
&#64;Value("${kafka.partNum}")
private int partNum;
// Kafka备份数
&#64;Value("${kafka.repeatNum}")
private short repeatNum;
// kafka地址
&#64;Value("${spring.kafka.bootstrap-servers}")
private String kafkaHost;
// 指定监听的数据表
&#64;Value("${binlog.database.table}")
private String database_table;
&#64;Autowired
KafkaSender kafkaSender;
&#64;Async
&#64;Override
public void run(String... args) throws Exception {
// 创建topic
kafkaSender.createTopic(kafkaHost, topic, partNum, repeatNum);
// 获取监听数据表数组
List databaseList &#61; Arrays.asList(database_table.split(","));
HashMap tableMap &#61; new HashMap();
// 创建binlog监听客户端
BinaryLogClient client &#61; new BinaryLogClient(host, port, user, password);
client.setServerId(serverId);
client.registerEventListener((event -> {
// binlog事件
EventData data &#61; event.getData();
if (data !&#61; null) {
if (data instanceof TableMapEventData) {
TableMapEventData tableMapEventData &#61; (TableMapEventData) data;
tableMap.put(tableMapEventData.getTableId(), tableMapEventData.getDatabase() &#43; "." &#43; tableMapEventData.getTable());
}
// update数据
if (data instanceof UpdateRowsEventData) {
UpdateRowsEventData updateRowsEventData &#61; (UpdateRowsEventData) data;
String tableName &#61; tableMap.get(updateRowsEventData.getTableId());
if (tableName !&#61; null && databaseList.contains(tableName)) {
String eventKey &#61; tableName &#43; ".update";
for (Map.Entry row : updateRowsEventData.getRows()) {
String msg &#61; JSON.toJSONString(new BinlogDto(eventKey, row.getValue()));
kafkaSender.send(topic, msg);
}
}
}
// insert数据
else if (data instanceof WriteRowsEventData) {
WriteRowsEventData writeRowsEventData &#61; (WriteRowsEventData) data;
String tableName &#61; tableMap.get(writeRowsEventData.getTableId());
if (tableName !&#61; null && databaseList.contains(tableName)) {
String eventKey &#61; tableName &#43; ".insert";
for (Serializable[] row : writeRowsEventData.getRows()) {
String msg &#61; JSON.toJSONString(new BinlogDto(eventKey, row));
kafkaSender.send(topic, msg);
}
}
}
// delete数据
else if (data instanceof DeleteRowsEventData) {
DeleteRowsEventData deleteRowsEventData &#61; (DeleteRowsEventData) data;
String tableName &#61; tableMap.get(deleteRowsEventData.getTableId());
if (tableName !&#61; null && databaseList.contains(tableName)) {
String eventKey &#61; tableName &#43; ".delete";
for (Serializable[] row : deleteRowsEventData.getRows()) {
String msg &#61; JSON.toJSONString(new BinlogDto(eventKey, row));
kafkaSender.send(topic, msg);
}
}
}
}
}));
client.connect();
}
}