
Syncing MySQL Data to Elasticsearch in Real Time with Binlog and Kafka

2019-03-20  Liuzz25

Contents

1. Syncing MySQL data to Elasticsearch in real time with Binlog and Kafka (Part 1) - Enabling the binlog
2. Syncing MySQL data to Elasticsearch in real time with Binlog and Kafka (Part 2) - Installing and running Kafka
3. Syncing MySQL data to Elasticsearch in real time with Binlog and Kafka (Part 3) - Producing binlog messages to Kafka
4. Syncing MySQL data to Elasticsearch in real time with Binlog and Kafka (Part 4) - Consuming Kafka messages and syncing data to ES


Preface

- Project modules

BinlogMiddleware

1. The binlog middleware parses the binlog and publishes each changed row as JSON to the Kafka queue.

KafkaMiddleware

2. The Kafka middleware consumes the messages from the Kafka queue and writes the data into Elasticsearch (a rough sketch of this consumer follows after this list).

- 基础服务

(1)Mysql
(2)Kafka(用于存放mysql变动消息,存放于Kafka队列)
(3)Elasticsearch

- Project source code

Gitee: https://gitee.com/OrgXxxx/SyncMysqlToElasticsearch
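
The KafkaMiddleware consumer is the subject of part four. As a rough sketch of the idea only (the index name, document layout, and client choice are assumptions, using spring-kafka plus the Elasticsearch 6.x REST high-level client and the BinlogDto defined in step 4 below):

import java.io.IOException;

import com.alibaba.fastjson.JSON;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
public class BinlogConsumer {

    @Autowired
    private RestHighLevelClient esClient;

    // Consume binlog messages and index each one into Elasticsearch.
    // Assumes spring.kafka.consumer.group-id is set in the consumer's configuration.
    @KafkaListener(topics = "binlog")
    public void onMessage(String msg) throws IOException {
        BinlogDto dto = JSON.parseObject(msg, BinlogDto.class);
        IndexRequest request = new IndexRequest("binlog", "_doc")
                .source("event", dto.getEvent(), "value", JSON.toJSONString(dto.getValue()));
        esClient.index(request, RequestOptions.DEFAULT);
    }
}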

Overview:

The BinlogMiddleware service listens to the binlog and pushes each change to the Kafka queue; it acts as the Kafka producer. The messages it produces look like this:

{"event":"teemoliu.user.update","value":[1,"TeemoLiu"]}
{"event":"teemoliu.role.insert","value":[1,"管理员"]}

1. Create a Spring Boot project.


2. Add the Maven dependencies:

<dependency>
  <groupId>com.github.shyiko</groupId>
  <artifactId>mysql-binlog-connector-java</artifactId>
  <version>0.16.1</version>
</dependency>
<dependency>
  <groupId>com.alibaba</groupId>
  <artifactId>fastjson</artifactId>
  <version>1.2.49</version>
</dependency>
<dependency>
  <groupId>org.springframework.kafka</groupId>
  <artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
  <groupId>org.apache.kafka</groupId>
  <artifactId>kafka-clients</artifactId>
  <version>1.1.1</version>
</dependency>

3. The configuration file (application.properties) is as follows:

# disable the embedded web server; this service exposes no HTTP endpoints
spring.main.web-environment=false

# binlog configuration
# server.id must be unique, since the binlog client registers with MySQL as a replica
server.id=1
binlog.host=localhost
binlog.port=3306
binlog.user=root
binlog.password=root
# tables to listen to
binlog.database.table=teemoliu.user,teemoliu.role

# kafka
spring.kafka.bootstrap-servers=localhost:9092
kafka.topic=binlog
# partition count and replication factor used when creating the topic
kafka.partNum=3
kafka.repeatNum=1

4. Create the binlog data transfer object:

public class BinlogDto {
    private String event;
    private Object value;

    public BinlogDto(String event, Object value) {
        this.event = event;
        this.value = value;
    }

    public BinlogDto() {
    }

    public String getEvent() {
        return event;
    }

    public void setEvent(String event) {
        this.event = event;
    }

    public Object getValue() {
        return value;
    }

    public void setValue(Object value) {
        this.value = value;
    }
}
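
Serialized with fastjson (which orders fields alphabetically by default), this DTO produces exactly the message format shown in the overview:

String msg = JSON.toJSONString(new BinlogDto("teemoliu.user.insert", new Object[]{1, "TeemoLiu"}));
// -> {"event":"teemoliu.user.insert","value":[1,"TeemoLiu"]}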

5. Create the Kafka message object:

public class Message {
    private Long id;
    private String msg;
    private Date sendTime;

    public Message(Long id, String msg, Date sendTime) {
        this.id = id;
        this.msg = msg;
        this.sendTime = sendTime;
    }

    public Message() {
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public Date getSendTime() {
        return sendTime;
    }

    public void setSendTime(Date sendTime) {
        this.sendTime = sendTime;
    }
}
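
Note that the listener in step 6 sends the raw JSON string directly; Message is a wrapper a producer could use to attach an id and a send time. A hypothetical usage (the id and payload are illustrative):

// Attach an id and a timestamp to the binlog JSON payload before sending.
Message wrapped = new Message(1L, "{\"event\":\"teemoliu.user.update\",\"value\":[1,\"TeemoLiu\"]}", new Date());
String payload = JSON.toJSONString(wrapped);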

6. The binlog listener, BinlogClientRunner:

@Component
public class BinlogClientRunner implements CommandLineRunner {

    @Value("${binlog.host}")
    private String host;

    @Value("${binlog.port}")
    private int port;

    @Value("${binlog.user}")
    private String user;

    @Value("${binlog.password}")
    private String password;

    // binlog server_id
    @Value("${server.id}")
    private long serverId;

    // Kafka topic
    @Value("${kafka.topic}")
    private String topic;

    // number of Kafka partitions
    @Value("${kafka.partNum}")
    private int partNum;

    // Kafka replication factor
    @Value("${kafka.repeatNum}")
    private short repeatNum;

    // Kafka bootstrap servers
    @Value("${spring.kafka.bootstrap-servers}")
    private String kafkaHost;

    // tables to listen to
    @Value("${binlog.database.table}")
    private String database_table;

    @Autowired
    KafkaSender kafkaSender;

    // connect() blocks, so run the listener off the main thread
    // (@Async takes effect only with @EnableAsync on a configuration class)
    @Async
    @Override
    public void run(String... args) throws Exception {

        // create the Kafka topic
        kafkaSender.createTopic(kafkaHost, topic, partNum, repeatNum);
        // build the list of watched "database.table" names
        List<String> databaseList = Arrays.asList(database_table.split(","));
        // maps binlog table ids to "database.table" names, filled from TableMapEvents
        HashMap<Long, String> tableMap = new HashMap<Long, String>();
        // create the binlog client
        BinaryLogClient client = new BinaryLogClient(host, port, user, password);
        client.setServerId(serverId);
        client.registerEventListener((event -> {
            // event payload
            EventData data = event.getData();
            if (data != null) {
                // a TableMapEvent announces the table id used by the row events that follow
                if (data instanceof TableMapEventData) {
                    TableMapEventData tableMapEventData = (TableMapEventData) data;
                    tableMap.put(tableMapEventData.getTableId(), tableMapEventData.getDatabase() + "." + tableMapEventData.getTable());
                }
                // updated rows
                if (data instanceof UpdateRowsEventData) {
                    UpdateRowsEventData updateRowsEventData = (UpdateRowsEventData) data;
                    String tableName = tableMap.get(updateRowsEventData.getTableId());
                    if (tableName != null && databaseList.contains(tableName)) {
                        String eventKey = tableName + ".update";
                        for (Map.Entry<Serializable[], Serializable[]> row : updateRowsEventData.getRows()) {
                            String msg = JSON.toJSONString(new BinlogDto(eventKey, row.getValue()));
                            kafkaSender.send(topic, msg);
                        }
                    }
                }
                // inserted rows
                else if (data instanceof WriteRowsEventData) {
                    WriteRowsEventData writeRowsEventData = (WriteRowsEventData) data;
                    String tableName = tableMap.get(writeRowsEventData.getTableId());
                    if (tableName != null && databaseList.contains(tableName)) {
                        String eventKey = tableName + ".insert";
                        for (Serializable[] row : writeRowsEventData.getRows()) {
                            String msg = JSON.toJSONString(new BinlogDto(eventKey, row));
                            kafkaSender.send(topic, msg);
                        }
                    }
                }
                // deleted rows
                else if (data instanceof DeleteRowsEventData) {
                    DeleteRowsEventData deleteRowsEventData = (DeleteRowsEventData) data;
                    String tableName = tableMap.get(deleteRowsEventData.getTableId());
                    if (tableName != null && databaseList.contains(tableName)) {
                        String eventKey = tableName + ".delete";
                        for (Serializable[] row : deleteRowsEventData.getRows()) {
                            String msg = JSON.toJSONString(new BinlogDto(eventKey, row));
                            kafkaSender.send(topic, msg);
                        }
                    }
                }
            }
        }));
        client.connect();
    }
}
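
The KafkaSender autowired above is not shown in this installment. A minimal sketch, assuming spring-kafka's KafkaTemplate and the AdminClient from kafka-clients 1.1.1 (errors such as an already-existing topic are reported only through the futures that createTopics returns, which this sketch does not check):

import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

@Component
public class KafkaSender {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    // Create the topic with the configured partition count and replication factor.
    public void createTopic(String bootstrapServers, String topic, int partNum, short repeatNum) {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        try (AdminClient admin = AdminClient.create(props)) {
            admin.createTopics(Collections.singleton(new NewTopic(topic, partNum, repeatNum)));
        }
    }

    // Send the JSON payload to the topic via spring-kafka.
    public void send(String topic, String msg) {
        kafkaTemplate.send(topic, msg);
    }
}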
