Browse Source

Merge remote-tracking branch 'remotes/origin/master' into fuxinpeng-beta

AE86 2 years ago
parent
commit
e091c339f7
100 changed files with 3873 additions and 2930 deletions
  1. 7 2
      README.md
  2. 4 0
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/checker/impl/connector/DqlPostgreSQLConfigChecker.java
  3. 2 2
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/ConfigServiceImpl.java
  4. 4 1
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/MonitorServiceImpl.java
  5. 20 0
      dbsyncer-common/src/main/java/org/dbsyncer/common/column/AbstractColumnValue.java
  6. 40 0
      dbsyncer-common/src/main/java/org/dbsyncer/common/column/ColumnValue.java
  7. 79 0
      dbsyncer-common/src/main/java/org/dbsyncer/common/config/BinlogRecorderConfig.java
  8. 66 0
      dbsyncer-common/src/main/java/org/dbsyncer/common/config/BufferActuatorConfig.java
  9. 5 0
      dbsyncer-common/src/main/java/org/dbsyncer/common/event/Event.java
  10. 14 43
      dbsyncer-common/src/main/java/org/dbsyncer/common/event/RowChangedEvent.java
  11. 21 0
      dbsyncer-common/src/main/java/org/dbsyncer/common/model/ConvertContext.java
  12. 38 0
      dbsyncer-common/src/main/java/org/dbsyncer/common/model/FullConvertContext.java
  13. 47 0
      dbsyncer-common/src/main/java/org/dbsyncer/common/model/IncrementConvertContext.java
  14. 6 9
      dbsyncer-common/src/main/java/org/dbsyncer/common/spi/ConvertService.java
  15. 13 0
      dbsyncer-common/src/main/java/org/dbsyncer/common/spi/ProxyApplicationContext.java
  16. 4 16
      dbsyncer-common/src/main/java/org/dbsyncer/common/util/DateFormatUtil.java
  17. 8 1
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/config/SqlBuilderConfig.java
  18. 68 33
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/AbstractDatabaseConnector.java
  19. 4 2
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/setter/BlobSetter.java
  20. 8 4
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/setter/ClobSetter.java
  21. 1 3
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/setter/NClobSetter.java
  22. 30 10
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/setter/PreparedFieldMapper.java
  23. 11 0
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/setter/SmallintSetter.java
  24. 1 5
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/sqlbuilder/SqlBuilderDelete.java
  25. 1 1
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/sqlbuilder/SqlBuilderInsert.java
  26. 1 1
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/sqlbuilder/SqlBuilderQuery.java
  27. 1 1
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/sqlbuilder/SqlBuilderUpdate.java
  28. 8 3
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/mysql/MysqlConnector.java
  29. 16 0
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/oracle/OracleConnector.java
  30. 16 1
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/postgresql/PostgreSQLConnector.java
  31. 2 2
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/sql/AbstractDQLConnector.java
  32. 10 31
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/sqlserver/SqlServerConnector.java
  33. 18 0
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/util/DatabaseUtil.java
  34. 45 1
      dbsyncer-connector/src/main/test/SqlServerConnectionTest.java
  35. 1 2
      dbsyncer-listener/src/main/java/org/dbsyncer/listener/AbstractDatabaseExtractor.java
  36. 1 2
      dbsyncer-listener/src/main/java/org/dbsyncer/listener/file/FileExtractor.java
  37. 5 20
      dbsyncer-listener/src/main/java/org/dbsyncer/listener/mysql/MysqlExtractor.java
  38. 3 3
      dbsyncer-listener/src/main/java/org/dbsyncer/listener/oracle/dcn/DBChangeNotification.java
  39. 3 4
      dbsyncer-listener/src/main/java/org/dbsyncer/listener/postgresql/AbstractMessageDecoder.java
  40. 0 182
      dbsyncer-listener/src/main/java/org/dbsyncer/listener/postgresql/column/AbstractColumnValue.java
  41. 0 73
      dbsyncer-listener/src/main/java/org/dbsyncer/listener/postgresql/column/ColumnValue.java
  42. 167 20
      dbsyncer-listener/src/main/java/org/dbsyncer/listener/postgresql/column/PgColumnValue.java
  43. 4 6
      dbsyncer-listener/src/main/java/org/dbsyncer/listener/postgresql/decoder/PgOutputMessageDecoder.java
  44. 1 4
      dbsyncer-listener/src/main/java/org/dbsyncer/listener/postgresql/decoder/TestDecodingMessageDecoder.java
  45. 4 5
      dbsyncer-listener/src/main/java/org/dbsyncer/listener/quartz/AbstractQuartzExtractor.java
  46. 17 21
      dbsyncer-listener/src/main/java/org/dbsyncer/listener/sqlserver/SqlServerExtractor.java
  47. 1 1
      dbsyncer-listener/src/main/test/DBChangeNotificationTest.java
  48. 16 14
      dbsyncer-manager/src/main/java/org/dbsyncer/manager/puller/IncrementPuller.java
  49. 1 1
      dbsyncer-monitor/src/main/java/org/dbsyncer/monitor/Monitor.java
  50. 21 17
      dbsyncer-monitor/src/main/java/org/dbsyncer/monitor/MonitorFactory.java
  51. 43 0
      dbsyncer-monitor/src/main/java/org/dbsyncer/monitor/enums/TaskMetricEnum.java
  52. 98 0
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/AbstractWriterBinlog.java
  53. 14 3
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/ParserFactory.java
  54. 67 0
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/convert/handler/StringToTimestampHandler.java
  55. 6 6
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/convert/handler/TimestampToChineseStandardTimeHandler.java
  56. 6 2
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/enums/ConvertEnum.java
  57. 14 25
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/AbstractBufferActuator.java
  58. 2 7
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/AbstractFlushStrategy.java
  59. 7 0
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/BufferActuator.java
  60. 5 0
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/StorageBufferActuator.java
  61. 29 17
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/WriterBufferActuator.java
  62. 5 60
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/AbstractWriter.java
  63. 4 0
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/Picker.java
  64. 10 13
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/WriterRequest.java
  65. 3 2
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/WriterResponse.java
  66. 16 4
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/strategy/ParserStrategy.java
  67. 7 53
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/strategy/impl/DisableWriterBufferActuatorStrategy.java
  68. 21 25
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/strategy/impl/EnableWriterBufferActuatorStrategy.java
  69. 8 2
      dbsyncer-plugin/src/main/java/org/dbsyncer/plugin/PluginFactory.java
  70. 250 0
      dbsyncer-plugin/src/main/java/org/dbsyncer/plugin/proxy/ProxyApplicationContextImpl.java
  71. 5 6
      dbsyncer-plugin/src/main/java/org/dbsyncer/plugin/service/DemoConvertServiceImpl.java
  72. 38 0
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/AbstractBinlogActuator.java
  73. 177 59
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/AbstractBinlogRecorder.java
  74. 30 0
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/BinlogActuator.java
  75. 98 0
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/BinlogColumnValue.java
  76. 0 178
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/BinlogContext.java
  77. 0 60
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/BinlogPipeline.java
  78. 32 0
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/BinlogRecorder.java
  79. 782 0
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/BinlogMap.java
  80. 50 0
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/BinlogMapOrBuilder.java
  81. 577 748
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/BinlogMessage.java
  82. 47 56
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/BinlogMessageOrBuilder.java
  83. 67 66
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/BinlogMessageProto.java
  84. 0 782
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/Data.java
  85. 0 50
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/DataOrBuilder.java
  86. 5 5
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/EventEnum.java
  87. 24 0
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/constant/BinlogConstant.java
  88. 18 0
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/enums/BinlogStatusEnum.java
  89. 24 0
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/enums/IndexFieldResolverEnum.java
  90. 9 0
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/lucene/IndexFieldResolver.java
  91. 16 12
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/lucene/Shard.java
  92. 4 4
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/model/BinlogConfig.java
  93. 70 0
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/model/BinlogIndex.java
  94. 22 5
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/query/Option.java
  95. 5 5
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/support/DiskStorageServiceImpl.java
  96. 32 49
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/support/MysqlStorageServiceImpl.java
  97. 239 0
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/util/BinlogMessageUtil.java
  98. 21 2
      dbsyncer-storage/src/main/java/org/dbsyncer/storage/util/DocumentUtil.java
  99. 2 2
      dbsyncer-storage/src/main/proto/BinlogMessageProto.proto
  100. 0 75
      dbsyncer-storage/src/main/test/BinlogMessageTest.java

+ 7 - 2
README.md

@@ -27,7 +27,7 @@ DBSyncer是一款开源的数据同步中间件,提供Mysql、Oracle、SqlServ
                 <td>Oracle</td>
                 <td>✔</td>
                 <td>✔</td>
-                <td>10g以上</td>
+                <td>Oracle 10gR2 -11g</td>
             </tr>
             <tr>
                 <td>SqlServer</td>
@@ -100,10 +100,15 @@ replicate-do-db=test
 
 ##### Oracle
 * DCN注册订阅。监听增删改事件,得到rowid,根据rowid执行SQL查询,得到变化数据。
-> 授予账号监听权限, 同时要求目标源表必须定义一个长度为18的varchar字段,通过接收rowid值实现增删改操作。
+> 1、授予账号监听权限, 同时要求目标源表必须定义一个长度为18的varchar字段,通过接收rowid值实现增删改操作。
 ```roomsql
 grant change notification to 你的账号
 ```
+> 2、账号必须是监听表的OWNER
+```roomsql
+SELECT OBJECT_ID, OBJECT_NAME, OWNER FROM ALL_OBJECTS WHERE OBJECT_TYPE = 'TABLE' AND OWNER='你的账号';
+```
+![DCN账号](https://images.gitee.com/uploads/images/2022/0717/001127_fb4049b6_376718.png "DCN账号.png")
 
 ##### SqlServer
 * SQL Server 2008提供了内建的方法变更数据捕获(Change Data Capture 即CDC)以实现异步跟踪用户表的数据修改。

+ 4 - 0
dbsyncer-biz/src/main/java/org/dbsyncer/biz/checker/impl/connector/DqlPostgreSQLConfigChecker.java

@@ -1,5 +1,6 @@
 package org.dbsyncer.biz.checker.impl.connector;
 
+import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.connector.config.DatabaseConfig;
 import org.springframework.stereotype.Component;
 
@@ -18,5 +19,8 @@ public class DqlPostgreSQLConfigChecker extends AbstractDataBaseConfigChecker {
         super.modify(connectorConfig, params);
         super.modifyDql(connectorConfig, params);
         super.modifySchema(connectorConfig, params);
+
+        connectorConfig.getProperties().put("dropSlotOnClose", StringUtil.isNotBlank(params.get("dropSlotOnClose")) ? "true" : "false");
+        connectorConfig.getProperties().put("pluginName", params.get("pluginName"));
     }
 }

+ 2 - 2
dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/ConfigServiceImpl.java

@@ -2,7 +2,7 @@ package org.dbsyncer.biz.impl;
 
 import org.apache.commons.io.FileUtils;
 import org.dbsyncer.biz.ConfigService;
-import org.dbsyncer.biz.checker.impl.config.ConfigChecker;
+import org.dbsyncer.biz.checker.Checker;
 import org.dbsyncer.biz.vo.ConfigVo;
 import org.dbsyncer.common.util.CollectionUtils;
 import org.dbsyncer.manager.Manager;
@@ -39,7 +39,7 @@ public class ConfigServiceImpl implements ConfigService {
     private Manager manager;
 
     @Autowired
-    private ConfigChecker configChecker;
+    private Checker configChecker;
 
     @Autowired
     private PreloadTemplate preloadTemplate;

+ 4 - 1
dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/MonitorServiceImpl.java

@@ -12,6 +12,7 @@ import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.monitor.Monitor;
 import org.dbsyncer.monitor.enums.DiskMetricEnum;
 import org.dbsyncer.monitor.enums.MetricEnum;
+import org.dbsyncer.monitor.enums.TaskMetricEnum;
 import org.dbsyncer.monitor.enums.ThreadPoolMetricEnum;
 import org.dbsyncer.monitor.model.AppReportMetric;
 import org.dbsyncer.monitor.model.MetricResponse;
@@ -47,6 +48,8 @@ public class MonitorServiceImpl implements MonitorService {
 
     @PostConstruct
     private void init() {
+        metricDetailFormatterMap.putIfAbsent(TaskMetricEnum.STORAGE_ACTIVE.getCode(), new ValueMetricDetailFormatter());
+        metricDetailFormatterMap.putIfAbsent(TaskMetricEnum.STORAGE_REMAINING_CAPACITY.getCode(), new ValueMetricDetailFormatter());
         metricDetailFormatterMap.putIfAbsent(ThreadPoolMetricEnum.TASK_SUBMITTED.getCode(), new ValueMetricDetailFormatter());
         metricDetailFormatterMap.putIfAbsent(ThreadPoolMetricEnum.QUEUE_UP.getCode(), new ValueMetricDetailFormatter());
         metricDetailFormatterMap.putIfAbsent(ThreadPoolMetricEnum.ACTIVE.getCode(), new ValueMetricDetailFormatter());
@@ -167,7 +170,7 @@ public class MonitorServiceImpl implements MonitorService {
 
     private List<MetricResponseVo> getMetrics(List<MetricResponse> metrics) {
         // 线程池状态
-        List<MetricResponse> metricList = monitor.getThreadPoolInfo();
+        List<MetricResponse> metricList = monitor.getMetricInfo();
         // 系统指标
         metricList.addAll(metrics);
 

+ 20 - 0
dbsyncer-common/src/main/java/org/dbsyncer/common/column/AbstractColumnValue.java

@@ -0,0 +1,20 @@
+package org.dbsyncer.common.column;
+
+public abstract class AbstractColumnValue<T> implements ColumnValue {
+
+    protected Object value;
+
+    protected T getValue() {
+        return (T) value;
+    }
+
+    public void setValue(T value) {
+        this.value = value;
+    }
+
+    @Override
+    public boolean isNull() {
+        return value == null;
+    }
+
+}

+ 40 - 0
dbsyncer-common/src/main/java/org/dbsyncer/common/column/ColumnValue.java

@@ -0,0 +1,40 @@
+package org.dbsyncer.common.column;
+
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/4/22 22:39
+ */
+public interface ColumnValue {
+
+    boolean isNull();
+
+    String asString();
+
+    byte[] asByteArray();
+
+    Short asShort();
+
+    Integer asInteger();
+
+    Long asLong();
+
+    Float asFloat();
+
+    Double asDouble();
+
+    Boolean asBoolean();
+
+    BigDecimal asBigDecimal();
+
+    Date asDate();
+
+    Timestamp asTimestamp();
+
+    Time asTime();
+}

+ 79 - 0
dbsyncer-common/src/main/java/org/dbsyncer/common/config/BinlogRecorderConfig.java

@@ -0,0 +1,79 @@
+package org.dbsyncer.common.config;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/7/14 23:50
+ */
+@Configuration
+@ConfigurationProperties(prefix = "dbsyncer.storage.binlog.recorder")
+public class BinlogRecorderConfig {
+
+    /**
+     * 批量同步数
+     */
+    private int batchCount = 1000;
+
+    /**
+     * 最长任务处理耗时(秒)
+     */
+    private int maxProcessingSeconds = 120;
+
+    /**
+     * 工作线任务队列
+     */
+    private int queueCapacity = 10000;
+
+    /**
+     * 写磁盘间隔(毫秒)
+     */
+    private int writerPeriodMillisecond = 500;
+
+    /**
+     * 读磁盘间隔(毫秒)
+     */
+    private int readerPeriodMillisecond = 2000;
+
+    public int getBatchCount() {
+        return batchCount;
+    }
+
+    public void setBatchCount(int batchCount) {
+        this.batchCount = batchCount;
+    }
+
+    public int getMaxProcessingSeconds() {
+        return maxProcessingSeconds;
+    }
+
+    public void setMaxProcessingSeconds(int maxProcessingSeconds) {
+        this.maxProcessingSeconds = maxProcessingSeconds;
+    }
+
+    public int getQueueCapacity() {
+        return queueCapacity;
+    }
+
+    public void setQueueCapacity(int queueCapacity) {
+        this.queueCapacity = queueCapacity;
+    }
+
+    public int getWriterPeriodMillisecond() {
+        return writerPeriodMillisecond;
+    }
+
+    public void setWriterPeriodMillisecond(int writerPeriodMillisecond) {
+        this.writerPeriodMillisecond = writerPeriodMillisecond;
+    }
+
+    public int getReaderPeriodMillisecond() {
+        return readerPeriodMillisecond;
+    }
+
+    public void setReaderPeriodMillisecond(int readerPeriodMillisecond) {
+        this.readerPeriodMillisecond = readerPeriodMillisecond;
+    }
+}

+ 66 - 0
dbsyncer-common/src/main/java/org/dbsyncer/common/config/BufferActuatorConfig.java

@@ -0,0 +1,66 @@
+package org.dbsyncer.common.config;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/7/14 23:50
+ */
+@Configuration
+@ConfigurationProperties(prefix = "dbsyncer.parser.flush.buffer.actuator")
+public class BufferActuatorConfig {
+
+    /**
+     * 写批量数
+     */
+    private int writerBatchCount = 100;
+
+    /**
+     * 批量同步数
+     */
+    private int batchCount = 2000;
+
+    /**
+     * 工作线任务队列
+     */
+    private int queueCapacity = 5_0000;
+
+    /**
+     * 同步间隔(毫秒)
+     */
+    private int periodMillisecond = 300;
+
+    public int getWriterBatchCount() {
+        return writerBatchCount;
+    }
+
+    public void setWriterBatchCount(int writerBatchCount) {
+        this.writerBatchCount = writerBatchCount;
+    }
+
+    public int getBatchCount() {
+        return batchCount;
+    }
+
+    public void setBatchCount(int batchCount) {
+        this.batchCount = batchCount;
+    }
+
+    public int getQueueCapacity() {
+        return queueCapacity;
+    }
+
+    public void setQueueCapacity(int queueCapacity) {
+        this.queueCapacity = queueCapacity;
+    }
+
+    public int getPeriodMillisecond() {
+        return periodMillisecond;
+    }
+
+    public void setPeriodMillisecond(int periodMillisecond) {
+        this.periodMillisecond = periodMillisecond;
+    }
+}

+ 5 - 0
dbsyncer-common/src/main/java/org/dbsyncer/common/event/Event.java

@@ -30,6 +30,11 @@ public interface Event {
      */
     void forceFlushEvent(Map<String,String> map);
 
+    /**
+     * 刷新事件变更时间
+     */
+    void refreshFlushEventUpdateTime();
+
     /**
      * 异常事件
      *

+ 14 - 43
dbsyncer-common/src/main/java/org/dbsyncer/common/event/RowChangedEvent.java

@@ -19,25 +19,20 @@ public class RowChangedEvent {
 
     private int tableGroupIndex;
     private String sourceTableName;
-    private String targetTableName;
     private String event;
-    private List<Object> beforeData;
-    private List<Object> afterData;
-    private Map<String, Object> before;
-    private Map<String, Object> after;
+    private List<Object> dataList;
+    private Map<String, Object> dataMap;
 
-    public RowChangedEvent(int tableGroupIndex, String event, Map<String, Object> before, Map<String, Object> after) {
+    public RowChangedEvent(int tableGroupIndex, String event, Map<String, Object> data) {
         this.tableGroupIndex = tableGroupIndex;
         this.event = event;
-        this.before = before;
-        this.after = after;
+        this.dataMap = data;
     }
 
-    public RowChangedEvent(String sourceTableName, String event, List<Object> beforeData, List<Object> afterData) {
+    public RowChangedEvent(String sourceTableName, String event, List<Object> data) {
         this.sourceTableName = sourceTableName;
         this.event = event;
-        this.beforeData = beforeData;
-        this.afterData = afterData;
+        this.dataList = data;
     }
 
     public int getTableGroupIndex() {
@@ -52,48 +47,24 @@ public class RowChangedEvent {
         this.sourceTableName = sourceTableName;
     }
 
-    public String getTargetTableName() {
-        return targetTableName;
-    }
-
-    public void setTargetTableName(String targetTableName) {
-        this.targetTableName = targetTableName;
-    }
-
     public String getEvent() {
         return event;
     }
 
-    public List<Object> getBeforeData() {
-        return beforeData;
-    }
-
-    public void setBeforeData(List<Object> beforeData) {
-        this.beforeData = beforeData;
-    }
-
-    public List<Object> getAfterData() {
-        return afterData;
-    }
-
-    public void setAfterData(List<Object> afterData) {
-        this.afterData = afterData;
-    }
-
-    public Map<String, Object> getBefore() {
-        return before;
+    public List<Object> getDataList() {
+        return dataList;
     }
 
-    public void setBefore(Map<String, Object> before) {
-        this.before = before;
+    public void setDataList(List<Object> dataList) {
+        this.dataList = dataList;
     }
 
-    public Map<String, Object> getAfter() {
-        return after;
+    public Map<String, Object> getDataMap() {
+        return dataMap;
     }
 
-    public void setAfter(Map<String, Object> after) {
-        this.after = after;
+    public void setDataMap(Map<String, Object> dataMap) {
+        this.dataMap = dataMap;
     }
 
     @Override

+ 21 - 0
dbsyncer-common/src/main/java/org/dbsyncer/common/model/ConvertContext.java

@@ -0,0 +1,21 @@
+package org.dbsyncer.common.model;
+
+import org.dbsyncer.common.spi.ProxyApplicationContext;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/6/30 16:00
+ */
+public class ConvertContext {
+
+    /**
+     * Spring上下文
+     */
+    protected ProxyApplicationContext context;
+
+    public ProxyApplicationContext getContext() {
+        return context;
+    }
+
+}

+ 38 - 0
dbsyncer-common/src/main/java/org/dbsyncer/common/model/FullConvertContext.java

@@ -0,0 +1,38 @@
+package org.dbsyncer.common.model;
+
+import org.dbsyncer.common.spi.ProxyApplicationContext;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/6/30 16:04
+ */
+public class FullConvertContext extends ConvertContext {
+
+    /**
+     * 全量同步,数据源数据集合
+     */
+    private List<Map> sourceList;
+
+    /**
+     * 全量同步,目标源源数据集合
+     */
+    private List<Map> targetList;
+
+    public FullConvertContext(ProxyApplicationContext context, List<Map> sourceList, List<Map> targetList) {
+        this.context = context;
+        this.sourceList = sourceList;
+        this.targetList = targetList;
+    }
+
+    public List<Map> getSourceList() {
+        return sourceList;
+    }
+
+    public List<Map> getTargetList() {
+        return targetList;
+    }
+}

+ 47 - 0
dbsyncer-common/src/main/java/org/dbsyncer/common/model/IncrementConvertContext.java

@@ -0,0 +1,47 @@
+package org.dbsyncer.common.model;
+
+import org.dbsyncer.common.spi.ProxyApplicationContext;
+
+import java.util.Map;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/6/30 16:06
+ */
+public class IncrementConvertContext extends ConvertContext {
+
+    /**
+     * 增量同步,事件(INSERT/UPDATE/DELETE)
+     */
+    private String event;
+
+    /**
+     * 增量同步,数据源数据
+     */
+    private Map source;
+
+    /**
+     * 增量同步,目标源数据
+     */
+    private Map target;
+
+    public IncrementConvertContext(ProxyApplicationContext context, String event, Map source, Map target) {
+        this.context = context;
+        this.event = event;
+        this.source = source;
+        this.target = target;
+    }
+
+    public String getEvent() {
+        return event;
+    }
+
+    public Map getSource() {
+        return source;
+    }
+
+    public Map getTarget() {
+        return target;
+    }
+}

+ 6 - 9
dbsyncer-common/src/main/java/org/dbsyncer/common/spi/ConvertService.java

@@ -1,7 +1,7 @@
 package org.dbsyncer.common.spi;
 
-import java.util.List;
-import java.util.Map;
+import org.dbsyncer.common.model.FullConvertContext;
+import org.dbsyncer.common.model.IncrementConvertContext;
 
 /**
  * 插件扩展服务接口
@@ -16,19 +16,16 @@ public interface ConvertService {
     /**
      * 全量同步
      *
-     * @param source 数据源
-     * @param target 目标源
+     * @param context 上下文
      */
-    void convert(List<Map> source, List<Map> target);
+    void convert(FullConvertContext context);
 
     /**
      * 增量同步
      *
-     * @param event  事件(INSERT/UPDATE/DELETE)
-     * @param source 数据源
-     * @param target 目标源
+     * @param context 上下文
      */
-    void convert(String event, Map source, Map target);
+    void convert(IncrementConvertContext context);
 
     /**
      * 版本号

+ 13 - 0
dbsyncer-common/src/main/java/org/dbsyncer/common/spi/ProxyApplicationContext.java

@@ -0,0 +1,13 @@
+package org.dbsyncer.common.spi;
+
+import org.springframework.context.ApplicationContext;
+
+/**
+ * Spring上下文代理对象
+ *
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/6/30 15:13
+ */
+public interface ProxyApplicationContext extends ApplicationContext {
+}

+ 4 - 16
dbsyncer-common/src/main/java/org/dbsyncer/common/util/DateFormatUtil.java

@@ -55,28 +55,17 @@ public abstract class DateFormatUtil {
             .appendText(ChronoField.ERA, TextStyle.SHORT)
             .optionalEnd()
             .toFormatter();
-    private static final DateTimeFormatter TS_FORMAT = new DateTimeFormatterBuilder()
-            .append(NON_ISO_LOCAL_DATE)
-            .appendLiteral(' ')
-            .append(DateTimeFormatter.ISO_LOCAL_TIME)
-            .optionalStart()
-            .appendLiteral(" ")
-            .appendText(ChronoField.ERA, TextStyle.SHORT)
-            .optionalEnd()
-            .toFormatter();
-
-    private static ZoneId zoneId = ZoneId.systemDefault();
 
     public static String getCurrentTime() {
         return LocalDateTime.now().format(TIME_FORMATTER);
     }
 
     public static String dateToString(Date date) {
-        return date.toInstant().atZone(zoneId).toLocalDate().format(DATE_FORMATTER);
+        return date.toLocalDate().format(DATE_FORMATTER);
     }
 
-    public static String dateToChineseStandardTimeString(Date date) {
-        return date.toInstant().atZone(zoneId).toLocalDateTime().format(CHINESE_STANDARD_TIME_FORMATTER);
+    public static String timestampToString(Timestamp timestamp) {
+        return timestamp.toLocalDateTime().format(CHINESE_STANDARD_TIME_FORMATTER);
     }
 
     public static Date stringToDate(String s) {
@@ -88,9 +77,8 @@ public abstract class DateFormatUtil {
     }
 
     public static Timestamp stringToTimestamp(String s) {
-        return Timestamp.valueOf(LocalDateTime.from(TS_FORMAT.parse(s)));
+        return Timestamp.valueOf(LocalDateTime.from(CHINESE_STANDARD_TIME_FORMATTER.parse(s)));
     }
-
     public static OffsetTime timeWithTimeZone(String s) {
         return OffsetTime.parse(s, TIME_TZ_FORMAT).withOffsetSameInstant(ZoneOffset.UTC);
     }

+ 8 - 1
dbsyncer-connector/src/main/java/org/dbsyncer/connector/config/SqlBuilderConfig.java

@@ -8,6 +8,8 @@ import java.util.List;
 public class SqlBuilderConfig {
 
     private Database database;
+    // 架构名
+    private String schema;
     // 表名
     private String tableName;
     // 主键
@@ -19,8 +21,9 @@ public class SqlBuilderConfig {
     // 引号
     private String quotation;
 
-    public SqlBuilderConfig(Database database, String tableName, String pk, List<Field> fields, String queryFilter, String quotation) {
+    public SqlBuilderConfig(Database database, String schema, String tableName, String pk, List<Field> fields, String queryFilter, String quotation) {
         this.database = database;
+        this.schema = schema;
         this.tableName = tableName;
         this.pk = pk;
         this.fields = fields;
@@ -32,6 +35,10 @@ public class SqlBuilderConfig {
         return database;
     }
 
+    public String getSchema() {
+        return schema;
+    }
+
     public String getTableName() {
         return tableName;
     }

+ 68 - 33
dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/AbstractDatabaseConnector.java

@@ -63,9 +63,11 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector
     @Override
     public MetaInfo getMetaInfo(DatabaseConnectorMapper connectorMapper, String tableName) {
         String quotation = buildSqlWithQuotation();
-        StringBuilder queryMetaSql = new StringBuilder("SELECT * FROM ").append(quotation).append(tableName).append(quotation).append(
-                " WHERE 1 != 1");
-        return connectorMapper.execute(databaseTemplate -> getMetaInfo(databaseTemplate, queryMetaSql.toString(), tableName));
+        DatabaseConfig config = connectorMapper.getConfig();
+        String queryMetaSql = new StringBuilder("SELECT * FROM ").append(getSchema(config, quotation)).append(quotation).append(tableName)
+                .append(quotation).append(" WHERE 1!=1").toString();
+
+        return connectorMapper.execute(databaseTemplate -> getMetaInfo(databaseTemplate, queryMetaSql, config.getSchema(), tableName));
     }
 
     @Override
@@ -165,48 +167,38 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector
     @Override
     public Map<String, String> getSourceCommand(CommandConfig commandConfig) {
         // 获取过滤SQL
-        List<Filter> filter = commandConfig.getFilter();
-        String queryFilterSql = getQueryFilterSql(filter);
+        final String queryFilterSql = getQueryFilterSql(commandConfig.getFilter());
+        final String quotation = buildSqlWithQuotation();
 
         // 获取查询SQL
-        Table table = commandConfig.getTable();
         Map<String, String> map = new HashMap<>();
-
-        String query = ConnectorConstant.OPERTION_QUERY;
-        map.put(query, buildSql(query, commandConfig, queryFilterSql));
-
+        String schema = getSchema((DatabaseConfig) commandConfig.getConnectorConfig(), quotation);
+        map.put(ConnectorConstant.OPERTION_QUERY, buildSql(ConnectorConstant.OPERTION_QUERY, commandConfig, schema, queryFilterSql));
         // 获取查询总数SQL
-        String quotation = buildSqlWithQuotation();
-        String pk = findOriginalTablePrimaryKey(commandConfig, quotation);
-        StringBuilder queryCount = new StringBuilder();
-        queryCount.append("SELECT COUNT(1) FROM (SELECT 1 FROM ").append(quotation).append(table.getName()).append(quotation);
-        if (StringUtil.isNotBlank(queryFilterSql)) {
-            queryCount.append(queryFilterSql);
-        }
-        queryCount.append(" GROUP BY ").append(pk).append(") DBSYNCER_T");
-        map.put(ConnectorConstant.OPERTION_QUERY_COUNT, queryCount.toString());
+        map.put(ConnectorConstant.OPERTION_QUERY_COUNT, getQueryCountSql(commandConfig, schema, quotation, queryFilterSql));
         return map;
     }
 
     @Override
     public Map<String, String> getTargetCommand(CommandConfig commandConfig) {
+        String quotation = buildSqlWithQuotation();
+        String schema = getSchema((DatabaseConfig) commandConfig.getConnectorConfig(), quotation);
+
         // 获取增删改SQL
         Map<String, String> map = new HashMap<>();
         String insert = SqlBuilderEnum.INSERT.getName();
-        map.put(insert, buildSql(insert, commandConfig, null));
+        map.put(insert, buildSql(insert, commandConfig, schema, null));
 
         String update = SqlBuilderEnum.UPDATE.getName();
-        map.put(update, buildSql(update, commandConfig, null));
+        map.put(update, buildSql(update, commandConfig, schema, null));
 
         String delete = SqlBuilderEnum.DELETE.getName();
-        map.put(delete, buildSql(delete, commandConfig, null));
+        map.put(delete, buildSql(delete, commandConfig, schema, null));
 
         // 获取查询数据行是否存在
-        String quotation = buildSqlWithQuotation();
         String pk = findOriginalTablePrimaryKey(commandConfig, quotation);
-        StringBuilder queryCount = new StringBuilder().append("SELECT COUNT(1) FROM ").append(quotation).append(
-                commandConfig.getTable().getName()).append(
-                quotation).append(" WHERE ").append(pk).append(" = ?");
+        StringBuilder queryCount = new StringBuilder().append("SELECT COUNT(1) FROM ").append(schema).append(quotation).append(
+                commandConfig.getTable().getName()).append(quotation).append(" WHERE ").append(pk).append(" = ?");
         String queryCountExist = ConnectorConstant.OPERTION_QUERY_COUNT_EXIST;
         map.put(queryCountExist, queryCount.toString());
         return map;
@@ -230,6 +222,21 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector
         return "";
     }
 
+    /**
+     * 获取架构名
+     *
+     * @param config
+     * @param quotation
+     * @return
+     */
+    protected String getSchema(DatabaseConfig config, String quotation) {
+        StringBuilder schema = new StringBuilder();
+        if (StringUtil.isNotBlank(config.getSchema())) {
+            schema.append(quotation).append(config.getSchema()).append(quotation).append(".");
+        }
+        return schema.toString();
+    }
+
     /**
      * 获取表列表
      *
@@ -245,6 +252,27 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector
         return Collections.EMPTY_LIST;
     }
 
+    /**
+     * 获取查询总数SQL
+     *
+     * @param commandConfig
+     * @param schema
+     * @param quotation
+     * @param queryFilterSql
+     * @return
+     */
+    protected String getQueryCountSql(CommandConfig commandConfig, String schema, String quotation, String queryFilterSql) {
+        String table = commandConfig.getTable().getName();
+        String pk = findOriginalTablePrimaryKey(commandConfig, quotation);
+        StringBuilder queryCount = new StringBuilder();
+        queryCount.append("SELECT COUNT(1) FROM (SELECT 1 FROM ").append(schema).append(quotation).append(table).append(quotation);
+        if (StringUtil.isNotBlank(queryFilterSql)) {
+            queryCount.append(queryFilterSql);
+        }
+        queryCount.append(" GROUP BY ").append(pk).append(") DBSYNCER_T");
+        return queryCount.toString();
+    }
+
     /**
      * 获取查询条件SQL
      *
@@ -317,10 +345,11 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector
      *
      * @param type           {@link SqlBuilderEnum}
      * @param commandConfig
+     * @param schema
      * @param queryFilterSQL
      * @return
      */
-    protected String buildSql(String type, CommandConfig commandConfig, String queryFilterSQL) {
+    protected String buildSql(String type, CommandConfig commandConfig, String schema, String queryFilterSQL) {
         Table table = commandConfig.getTable();
         if (null == table) {
             logger.error("Table can not be null.");
@@ -359,7 +388,7 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector
             pk = findOriginalTablePrimaryKey(commandConfig, "");
         }
 
-        SqlBuilderConfig config = new SqlBuilderConfig(this, tableName, pk, fields, queryFilterSQL, buildSqlWithQuotation());
+        SqlBuilderConfig config = new SqlBuilderConfig(this, schema, tableName, pk, fields, queryFilterSQL, buildSqlWithQuotation());
         return SqlBuilderEnum.getSqlBuilder(type).buildSql(config);
     }
 
@@ -368,10 +397,11 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector
      *
      * @param databaseTemplate
      * @param metaSql          查询元数据
+     * @param schema           架构名
      * @param tableName        表名
      * @return
      */
-    protected MetaInfo getMetaInfo(DatabaseTemplate databaseTemplate, String metaSql, String tableName) throws SQLException {
+    protected MetaInfo getMetaInfo(DatabaseTemplate databaseTemplate, String metaSql, String schema, String tableName) throws SQLException {
         SqlRowSet sqlRowSet = databaseTemplate.queryForRowSet(metaSql);
         ResultSetWrappingSqlRowSet rowSet = (ResultSetWrappingSqlRowSet) sqlRowSet;
         SqlRowSetMetaData metaData = rowSet.getMetaData();
@@ -384,7 +414,10 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector
         List<Field> fields = new ArrayList<>(columnCount);
         Map<String, List<String>> tables = new HashMap<>();
         try {
-            DatabaseMetaData md = databaseTemplate.getConnection().getMetaData();
+            Connection connection = databaseTemplate.getConnection();
+            DatabaseMetaData md = connection.getMetaData();
+            final String catalog = connection.getCatalog();
+            schema = StringUtil.isNotBlank(schema) ? schema : null;
             String name = null;
             String label = null;
             String typeName = null;
@@ -394,7 +427,7 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector
             for (int i = 1; i <= columnCount; i++) {
                 table = StringUtil.isNotBlank(tableName) ? tableName : metaData.getTableName(i);
                 if (null == tables.get(table)) {
-                    tables.putIfAbsent(table, findTablePrimaryKeys(md, table));
+                    tables.putIfAbsent(table, findTablePrimaryKeys(md, catalog, schema, table));
                 }
                 name = metaData.getColumnName(i);
                 label = metaData.getColumnLabel(i);
@@ -441,16 +474,18 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector
      * 返回表主键
      *
      * @param md
+     * @param catalog
+     * @param schema
      * @param tableName
      * @return
      * @throws SQLException
      */
-    private List<String> findTablePrimaryKeys(DatabaseMetaData md, String tableName) throws SQLException {
+    private List<String> findTablePrimaryKeys(DatabaseMetaData md, String catalog, String schema, String tableName) throws SQLException {
         //根据表名获得主键结果集
         ResultSet rs = null;
         List<String> primaryKeys = new ArrayList<>();
         try {
-            rs = md.getPrimaryKeys(null, null, tableName);
+            rs = md.getPrimaryKeys(catalog, schema, tableName);
             while (rs.next()) {
                 primaryKeys.add(rs.getString("COLUMN_NAME"));
             }

+ 4 - 2
dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/setter/BlobSetter.java

@@ -16,13 +16,15 @@ public class BlobSetter extends AbstractSetter<Blob> {
 
     @Override
     protected void setIfValueTypeNotMatch(PreparedFieldMapper mapper, PreparedStatement ps, int i, int type, Object val) throws SQLException {
-        // 存放jpg等文件
         if (val instanceof Blob) {
             Blob blob = (Blob) val;
             ps.setBlob(i, blob);
             return;
         }
+        if (val instanceof byte[]) {
+            ps.setBlob(i, mapper.getBlob((byte[]) val));
+            return;
+        }
         throw new ConnectorException(String.format("BlobSetter can not find type [%s], val [%s]", type, val));
     }
-
 }

+ 8 - 4
dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/setter/ClobSetter.java

@@ -15,10 +15,14 @@ public class ClobSetter extends AbstractSetter<Clob> {
     }
 
     @Override
-    protected void setIfValueTypeNotMatch(PreparedFieldMapper mapper, PreparedStatement ps, int i, int type, Object val) throws SQLException {
-        if(val instanceof Clob) {
-            Clob clob = (Clob) val;
-            ps.setClob(i, clob);
+    protected void setIfValueTypeNotMatch(PreparedFieldMapper mapper, PreparedStatement ps, int i, int type, Object val)
+            throws SQLException {
+        if (val instanceof Clob) {
+            ps.setClob(i, (Clob) val);
+            return;
+        }
+        if (val instanceof byte[]) {
+            ps.setClob(i, mapper.getClob((byte[]) val));
             return;
         }
         throw new ConnectorException(String.format("ClobSetter can not find type [%s], val [%s]", type, val));

+ 1 - 3
dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/setter/NClobSetter.java

@@ -17,9 +17,7 @@ public class NClobSetter extends AbstractSetter<NClob> {
     @Override
     protected void setIfValueTypeNotMatch(PreparedFieldMapper mapper, PreparedStatement ps, int i, int type, Object val) throws SQLException {
         if (val instanceof byte[]) {
-            byte[] bytes = (byte[]) val;
-            NClob nClob = mapper.getNClob(bytes);
-            ps.setNClob(i, nClob);
+            ps.setNClob(i, mapper.getNClob((byte[]) val));
             return;
         }
         throw new ConnectorException(String.format("NClobSetter can not find type [%s], val [%s]", type, val));

+ 30 - 10
dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/setter/PreparedFieldMapper.java

@@ -1,26 +1,46 @@
 package org.dbsyncer.connector.database.setter;
 
 import oracle.jdbc.OracleConnection;
-import oracle.sql.NCLOB;
+import org.dbsyncer.connector.database.ds.SimpleConnection;
 
-import java.sql.Connection;
-import java.sql.NClob;
-import java.sql.SQLException;
+import java.nio.charset.Charset;
+import java.sql.*;
 
 public class PreparedFieldMapper {
 
-    private Connection connection;
+    private SimpleConnection connection;
 
     public PreparedFieldMapper(Connection connection) {
-        this.connection = connection;
+        this.connection = (SimpleConnection) connection;
     }
 
     public NClob getNClob(byte[] bytes) throws SQLException {
-        if (connection instanceof OracleConnection) {
-            OracleConnection conn = (OracleConnection) connection;
-            return new NCLOB(conn, bytes);
+        if (connection.getConnection() instanceof OracleConnection) {
+            OracleConnection conn = (OracleConnection) connection.getConnection();
+            NClob nClob = conn.createNClob();
+            nClob.setString(1, new String(bytes, Charset.defaultCharset()));
+            return nClob;
         }
         return connection.createNClob();
     }
 
-}
+    public Blob getBlob(byte[] bytes) throws SQLException {
+        if (connection.getConnection() instanceof OracleConnection) {
+            OracleConnection conn = (OracleConnection) connection.getConnection();
+            Blob blob = conn.createBlob();
+            blob.setBytes(1, bytes);
+            return blob;
+        }
+        return connection.createBlob();
+    }
+
+    public Clob getClob(byte[] bytes) throws SQLException {
+        if (connection.getConnection() instanceof OracleConnection) {
+            OracleConnection conn = (OracleConnection) connection.getConnection();
+            Clob clob = conn.createClob();
+            clob.setString(1, new String(bytes, Charset.defaultCharset()));
+            return clob;
+        }
+        return connection.createClob();
+    }
+}

+ 11 - 0
dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/setter/SmallintSetter.java

@@ -1,5 +1,6 @@
 package org.dbsyncer.connector.database.setter;
 
+import org.dbsyncer.connector.ConnectorException;
 import org.dbsyncer.connector.database.AbstractSetter;
 
 import java.sql.PreparedStatement;
@@ -12,4 +13,14 @@ public class SmallintSetter extends AbstractSetter<Integer> {
         ps.setInt(i, val);
     }
 
+    @Override
+    protected void setIfValueTypeNotMatch(PreparedFieldMapper mapper, PreparedStatement ps, int i, int type, Object val)
+            throws SQLException {
+        if(val instanceof Short){
+            Short s = (Short) val;
+            ps.setShort(i, s);
+            return;
+        }
+        throw new ConnectorException(String.format("SmallintSetter can not find type [%s], val [%s]", type, val));
+    }
 }

+ 1 - 5
dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/sqlbuilder/SqlBuilderDelete.java

@@ -2,8 +2,6 @@ package org.dbsyncer.connector.database.sqlbuilder;
 
 import org.dbsyncer.connector.config.SqlBuilderConfig;
 import org.dbsyncer.connector.database.AbstractSqlBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * @author AE86
@@ -12,14 +10,12 @@ import org.slf4j.LoggerFactory;
  */
 public class SqlBuilderDelete extends AbstractSqlBuilder {
 
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
     @Override
     public String buildSql(SqlBuilderConfig config) {
         String tableName = config.getTableName();
         String quotation = config.getQuotation();
         // DELETE FROM "USER" WHERE "ID"=?
-        return new StringBuilder().append("DELETE FROM ").append(quotation).append(tableName).append(quotation).append(" WHERE ").append(quotation).append(config.getPk()).append(quotation)
+        return new StringBuilder().append("DELETE FROM ").append(config.getSchema()).append(quotation).append(tableName).append(quotation).append(" WHERE ").append(quotation).append(config.getPk()).append(quotation)
                 .append("=?").toString();
     }
 

+ 1 - 1
dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/sqlbuilder/SqlBuilderInsert.java

@@ -35,7 +35,7 @@ public class SqlBuilderInsert extends AbstractSqlBuilder {
             }
         }
         // INSERT INTO "USER"("USERNAME","AGE") VALUES (?,?)
-        sql.insert(0, "INSERT INTO ").append(quotation).append(tableName).append(quotation).append("(").append(fs).append(") VALUES (")
+        sql.insert(0, "INSERT INTO ").append(config.getSchema()).append(quotation).append(tableName).append(quotation).append("(").append(fs).append(") VALUES (")
                 .append(vs).append(")");
         return sql.toString();
     }

+ 1 - 1
dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/sqlbuilder/SqlBuilderQuery.java

@@ -53,7 +53,7 @@ public class SqlBuilderQuery extends AbstractSqlBuilder {
             }
         }
         // SELECT "ID","NAME" FROM "USER"
-        sql.insert(0, "SELECT ").append(" FROM ").append(quotation).append(tableName).append(quotation);
+        sql.insert(0, "SELECT ").append(" FROM ").append(config.getSchema()).append(quotation).append(tableName).append(quotation);
         // 解析查询条件
         if (StringUtil.isNotBlank(queryFilter)) {
             sql.append(queryFilter);

+ 1 - 1
dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/sqlbuilder/SqlBuilderUpdate.java

@@ -21,7 +21,7 @@ public class SqlBuilderUpdate extends AbstractSqlBuilder {
         StringBuilder sql = new StringBuilder();
         int size = fields.size();
         int end = size - 1;
-        sql.append("UPDATE ").append(quotation).append(tableName).append(quotation).append(" SET ");
+        sql.append("UPDATE ").append(config.getSchema()).append(quotation).append(tableName).append(quotation).append(" SET ");
         for (int i = 0; i < size; i++) {
             // skip pk
             if(fields.get(i).isPk()){

+ 8 - 3
dbsyncer-connector/src/main/java/org/dbsyncer/connector/mysql/MysqlConnector.java

@@ -1,15 +1,20 @@
 package org.dbsyncer.connector.mysql;
 
-import org.dbsyncer.connector.model.PageSql;
-import org.dbsyncer.connector.model.Table;
 import org.dbsyncer.connector.constant.DatabaseConstant;
 import org.dbsyncer.connector.database.AbstractDatabaseConnector;
 import org.dbsyncer.connector.database.DatabaseConnectorMapper;
+import org.dbsyncer.connector.model.PageSql;
+import org.dbsyncer.connector.model.Table;
 
 import java.util.List;
 
 public final class MysqlConnector extends AbstractDatabaseConnector {
 
+    @Override
+    protected String buildSqlWithQuotation() {
+        return "`";
+    }
+
     @Override
     public String getPageSql(PageSql config) {
         return config.getQuerySql() + DatabaseConstant.MYSQL_PAGE_SQL;
@@ -17,7 +22,7 @@ public final class MysqlConnector extends AbstractDatabaseConnector {
 
     @Override
     public Object[] getPageArgs(int pageIndex, int pageSize) {
-        return new Object[]{(pageIndex - 1) * pageSize, pageSize};
+        return new Object[] {(pageIndex - 1) * pageSize, pageSize};
     }
 
     @Override

+ 16 - 0
dbsyncer-connector/src/main/java/org/dbsyncer/connector/oracle/OracleConnector.java

@@ -1,6 +1,9 @@
 package org.dbsyncer.connector.oracle;
 
 import org.dbsyncer.common.util.CollectionUtils;
+import org.dbsyncer.common.util.StringUtil;
+import org.dbsyncer.connector.config.CommandConfig;
+import org.dbsyncer.connector.config.DatabaseConfig;
 import org.dbsyncer.connector.model.PageSql;
 import org.dbsyncer.connector.model.Table;
 import org.dbsyncer.connector.constant.DatabaseConstant;
@@ -43,4 +46,17 @@ public final class OracleConnector extends AbstractDatabaseConnector {
     protected String getValidationQuery() {
         return "select 1 from dual";
     }
+
+    @Override
+    protected String getQueryCountSql(CommandConfig commandConfig, String schema, String quotation, String queryFilterSql) {
+        // 有过滤条件,走默认方式
+        if (StringUtil.isNotBlank(queryFilterSql)) {
+            return super.getQueryCountSql(commandConfig, schema, quotation, queryFilterSql);
+        }
+
+        // 从系统表查询
+        final String table = commandConfig.getTable().getName();
+        DatabaseConfig cfg = (DatabaseConfig) commandConfig.getConnectorConfig();
+        return String.format("SELECT NUM_ROWS FROM ALL_TABLES WHERE OWNER = '%s' AND TABLE_NAME = '%s'", cfg.getUsername().toUpperCase(), table);
+    }
 }

+ 16 - 1
dbsyncer-connector/src/main/java/org/dbsyncer/connector/postgresql/PostgreSQLConnector.java

@@ -1,6 +1,8 @@
 package org.dbsyncer.connector.postgresql;
 
 import org.dbsyncer.common.util.CollectionUtils;
+import org.dbsyncer.common.util.StringUtil;
+import org.dbsyncer.connector.config.CommandConfig;
 import org.dbsyncer.connector.config.DatabaseConfig;
 import org.dbsyncer.connector.model.PageSql;
 import org.dbsyncer.connector.model.Table;
@@ -46,4 +48,17 @@ public final class PostgreSQLConnector extends AbstractDatabaseConnector {
     protected String buildSqlWithQuotation() {
         return "\"";
     }
-}
+
+    @Override
+    protected String getQueryCountSql(CommandConfig commandConfig, String schema, String quotation, String queryFilterSql) {
+        // 有过滤条件,走默认方式
+        if (StringUtil.isNotBlank(queryFilterSql)) {
+            return super.getQueryCountSql(commandConfig, schema, quotation, queryFilterSql);
+        }
+
+        // 从系统表查询
+        final String table = commandConfig.getTable().getName();
+        DatabaseConfig cfg = (DatabaseConfig) commandConfig.getConnectorConfig();
+        return String.format("SELECT N_LIVE_TUP FROM PG_STAT_USER_TABLES WHERE SCHEMANAME='%s' AND RELNAME='%s'", cfg.getSchema(), table);
+    }
+}

+ 2 - 2
dbsyncer-connector/src/main/java/org/dbsyncer/connector/sql/AbstractDQLConnector.java

@@ -43,7 +43,7 @@ public abstract class AbstractDQLConnector extends AbstractDatabaseConnector {
         sql = sql.replace("\r", " ");
         sql = sql.replace("\n", " ");
         String queryMetaSql = StringUtil.contains(sql, " WHERE ") ? cfg.getSql() + " AND 1!=1 " : cfg.getSql() + " WHERE 1!=1 ";
-        return connectorMapper.execute(databaseTemplate -> super.getMetaInfo(databaseTemplate, queryMetaSql, cfg.getTable()));
+        return connectorMapper.execute(databaseTemplate -> super.getMetaInfo(databaseTemplate, queryMetaSql, cfg.getSchema(), cfg.getTable()));
     }
 
     /**
@@ -82,4 +82,4 @@ public abstract class AbstractDQLConnector extends AbstractDatabaseConnector {
         map.put(ConnectorConstant.OPERTION_QUERY_COUNT, queryCount.toString());
         return map;
     }
-}
+}

+ 10 - 31
dbsyncer-connector/src/main/java/org/dbsyncer/connector/sqlserver/SqlServerConnector.java

@@ -3,27 +3,20 @@ package org.dbsyncer.connector.sqlserver;
 import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.connector.config.CommandConfig;
 import org.dbsyncer.connector.config.DatabaseConfig;
-import org.dbsyncer.connector.constant.ConnectorConstant;
 import org.dbsyncer.connector.constant.DatabaseConstant;
 import org.dbsyncer.connector.database.AbstractDatabaseConnector;
 import org.dbsyncer.connector.database.DatabaseConnectorMapper;
 import org.dbsyncer.connector.model.PageSql;
 import org.dbsyncer.connector.model.Table;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 public final class SqlServerConnector extends AbstractDatabaseConnector {
 
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
     @Override
     public List<Table> getTable(DatabaseConnectorMapper connectorMapper) {
         DatabaseConfig config = connectorMapper.getConfig();
-        return super.getTable(connectorMapper, String.format("SELECT NAME FROM SYS.TABLES WHERE SCHEMA_ID = SCHEMA_ID('%s') AND IS_MS_SHIPPED = 0", config.getSchema()));
+        return super.getTable(connectorMapper, String.format("select name from sys.tables where schema_id = schema_id('%s') and is_ms_shipped = 0", config.getSchema()));
     }
 
     @Override
@@ -33,33 +26,19 @@ public final class SqlServerConnector extends AbstractDatabaseConnector {
 
     @Override
     public Object[] getPageArgs(int pageIndex, int pageSize) {
-        return new Object[]{(pageIndex - 1) * pageSize + 1, pageIndex * pageSize};
+        return new Object[] {(pageIndex - 1) * pageSize + 1, pageIndex * pageSize};
     }
 
     @Override
-    public Map<String, String> getSourceCommand(CommandConfig commandConfig) {
-        // 获取过滤SQL
-        String queryFilterSql = this.getQueryFilterSql(commandConfig.getFilter());
-
-        // 获取查询SQL
-        Table table = commandConfig.getTable();
-        Map<String, String> map = new HashMap<>();
-
-        String query = ConnectorConstant.OPERTION_QUERY;
-        map.put(query, this.buildSql(query, commandConfig, queryFilterSql));
-
-        // 获取查询总数SQL
-        StringBuilder queryCount = new StringBuilder();
+    protected String getQueryCountSql(CommandConfig commandConfig, String schema, String quotation, String queryFilterSql) {
+        // 有过滤条件,走默认方式
         if (StringUtil.isNotBlank(queryFilterSql)) {
-            queryCount.append("SELECT COUNT(*) FROM ").append(table.getName()).append(queryFilterSql);
-        } else {
-            DatabaseConfig cfg = (DatabaseConfig) commandConfig.getConnectorConfig();
-            // 从存储过程查询(定时更新总数,可能存在误差)
-            queryCount.append("SELECT ROWS FROM SYSINDEXES WHERE ID = OBJECT_ID('").append(cfg.getSchema()).append(".").append(table.getName()).append(
-                    "') AND INDID IN (0, 1)");
+            return super.getQueryCountSql(commandConfig, schema, quotation, queryFilterSql);
         }
-        map.put(ConnectorConstant.OPERTION_QUERY_COUNT, queryCount.toString());
-        return map;
-    }
 
+        String table = commandConfig.getTable().getName();
+        DatabaseConfig cfg = (DatabaseConfig) commandConfig.getConnectorConfig();
+        // 从存储过程查询(定时更新总数,可能存在误差)
+        return String.format("select rows from sysindexes where id = object_id('%s.%s') and indid in (0, 1)", cfg.getSchema(), table);
+    }
 }

+ 18 - 0
dbsyncer-connector/src/main/java/org/dbsyncer/connector/util/DatabaseUtil.java

@@ -6,6 +6,9 @@ import org.dbsyncer.connector.ConnectorException;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.SQLException;
+import java.util.regex.Matcher;
+
+import static java.util.regex.Pattern.compile;
 
 public abstract class DatabaseUtil {
 
@@ -33,4 +36,19 @@ public abstract class DatabaseUtil {
         }
     }
 
+    public static String getDatabaseName(String url) {
+        Matcher matcher = compile("(//)(?!(\\?)).+?(\\?)").matcher(url);
+        while (matcher.find()) {
+            url = matcher.group(0);
+            break;
+        }
+        int s = url.lastIndexOf("/");
+        int e = url.lastIndexOf("?");
+        if (s > 0 && e > 0) {
+            return StringUtil.substring(url, s + 1, e);
+        }
+
+        throw new ConnectorException("database is invalid");
+    }
+
 }

+ 45 - 1
dbsyncer-connector/src/main/test/SqlServerConnectionTest.java

@@ -1,9 +1,16 @@
+import oracle.jdbc.OracleConnection;
 import org.dbsyncer.connector.config.DatabaseConfig;
 import org.dbsyncer.connector.database.DatabaseConnectorMapper;
+import org.dbsyncer.connector.database.ds.SimpleConnection;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.jdbc.core.BatchPreparedStatementSetter;
 
+import java.nio.charset.Charset;
+import java.sql.Clob;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
 import java.time.LocalDateTime;
 import java.util.concurrent.*;
 
@@ -16,6 +23,43 @@ public class SqlServerConnectionTest {
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
+    @Test
+    public void testByte() {
+        DatabaseConfig config = new DatabaseConfig();
+        config.setUrl("jdbc:oracle:thin:@127.0.0.1:1521:XE");
+        config.setUsername("ae86");
+        config.setPassword("123");
+        config.setDriverClassName("oracle.jdbc.OracleDriver");
+        final DatabaseConnectorMapper connectorMapper = new DatabaseConnectorMapper(config);
+
+        String executeSql="UPDATE \"my_user\" SET \"name\"=?,\"clo\"=? WHERE \"id\"=?";
+        int[] execute = connectorMapper.execute(databaseTemplate ->
+                databaseTemplate.batchUpdate(executeSql, new BatchPreparedStatementSetter() {
+                    @Override
+                    public void setValues(PreparedStatement ps, int i) {
+                        try {
+                            SimpleConnection connection = (SimpleConnection) databaseTemplate.getConnection();
+                            OracleConnection conn = (OracleConnection) connection.getConnection();
+                            Clob clob = conn.createClob();
+                            clob.setString(1, new String("中文888".getBytes(Charset.defaultCharset())));
+
+                            ps.setString(1, "hello888");
+                            ps.setClob(2, clob);
+                            ps.setInt(3, 2);
+                        } catch (SQLException e) {
+                            e.printStackTrace();
+                        }
+                    }
+
+                    @Override
+                    public int getBatchSize() {
+                        return 1;
+                    }
+                })
+        );
+        logger.info("execute:{}", execute);
+    }
+
     @Test
     public void testConnection() throws InterruptedException {
         DatabaseConfig config = new DatabaseConfig();
@@ -65,4 +109,4 @@ public class SqlServerConnectionTest {
         TimeUnit.SECONDS.sleep(3);
         logger.info("test end");
     }
-}
+}

+ 1 - 2
dbsyncer-listener/src/main/java/org/dbsyncer/listener/AbstractDatabaseExtractor.java

@@ -2,7 +2,6 @@ package org.dbsyncer.listener;
 
 import org.dbsyncer.common.event.RowChangedEvent;
 import org.dbsyncer.common.util.CollectionUtils;
-import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.connector.config.DatabaseConfig;
 import org.dbsyncer.connector.constant.ConnectorConstant;
 import org.dbsyncer.connector.database.DatabaseConnectorMapper;
@@ -39,7 +38,7 @@ public abstract class AbstractDatabaseExtractor extends AbstractExtractor {
             switch (event.getEvent()){
                 case ConnectorConstant.OPERTION_UPDATE:
                 case ConnectorConstant.OPERTION_INSERT:
-                    event.setAfterData(queryData(event.getAfterData()));
+                    event.setDataList(queryData(event.getDataList()));
                     break;
                 default:
                     break;

+ 1 - 2
dbsyncer-listener/src/main/java/org/dbsyncer/listener/file/FileExtractor.java

@@ -24,7 +24,6 @@ import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.nio.file.*;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
@@ -149,7 +148,7 @@ public class FileExtractor extends AbstractExtractor {
                 snapshot.put(filePosKey, String.valueOf(raf.getFilePointer()));
                 if (StringUtil.isNotBlank(line)) {
                     List<Object> row = fileResolver.parseList(pipelineResolver.fields, separator, line);
-                    changedEvent(new RowChangedEvent(fileName, ConnectorConstant.OPERTION_UPDATE, Collections.EMPTY_LIST, row));
+                    changedEvent(new RowChangedEvent(fileName, ConnectorConstant.OPERTION_UPDATE, row));
                 }
             }
 

+ 5 - 20
dbsyncer-listener/src/main/java/org/dbsyncer/listener/mysql/MysqlExtractor.java

@@ -6,6 +6,7 @@ import org.dbsyncer.common.event.RowChangedEvent;
 import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.connector.config.DatabaseConfig;
 import org.dbsyncer.connector.constant.ConnectorConstant;
+import org.dbsyncer.connector.util.DatabaseUtil;
 import org.dbsyncer.listener.AbstractDatabaseExtractor;
 import org.dbsyncer.listener.ListenerException;
 import org.dbsyncer.listener.config.Host;
@@ -86,7 +87,7 @@ public class MysqlExtractor extends AbstractDatabaseExtractor {
         if (StringUtil.isBlank(config.getUrl())) {
             throw new ListenerException("url is invalid");
         }
-        database = readDatabaseName(config.getUrl());
+        database = DatabaseUtil.getDatabaseName(config.getUrl());
         cluster = readNodes(config.getUrl());
         Assert.notEmpty(cluster, "Mysql连接地址有误.");
 
@@ -104,21 +105,6 @@ public class MysqlExtractor extends AbstractDatabaseExtractor {
         client.connect();
     }
 
-    private String readDatabaseName(String url) {
-        Matcher matcher = compile("(//)(?!(\\?)).+?(\\?)").matcher(url);
-        while (matcher.find()) {
-            url = matcher.group(0);
-            break;
-        }
-        int s = url.lastIndexOf("/");
-        int e = url.lastIndexOf("?");
-        if (s > 0 && e > 0) {
-            return StringUtil.substring(url, s + 1, e);
-        }
-
-        throw new ListenerException("database is invalid");
-    }
-
     private List<Host> readNodes(String url) {
         Matcher matcher = compile("(//)(?!(/)).+?(/)").matcher(url);
         while (matcher.find()) {
@@ -242,9 +228,8 @@ public class MysqlExtractor extends AbstractDatabaseExtractor {
                 UpdateRowsEventData data = event.getData();
                 if (isFilterTable(data.getTableId())) {
                     data.getRows().forEach(m -> {
-                        List<Object> before = Stream.of(m.getKey()).collect(Collectors.toList());
                         List<Object> after = Stream.of(m.getValue()).collect(Collectors.toList());
-                        sendChangedEvent(new RowChangedEvent(getTableName(data.getTableId()), ConnectorConstant.OPERTION_UPDATE, before, after));
+                        sendChangedEvent(new RowChangedEvent(getTableName(data.getTableId()), ConnectorConstant.OPERTION_UPDATE, after));
                     });
                 }
                 return;
@@ -255,7 +240,7 @@ public class MysqlExtractor extends AbstractDatabaseExtractor {
                 if (isFilterTable(data.getTableId())) {
                     data.getRows().forEach(m -> {
                         List<Object> after = Stream.of(m).collect(Collectors.toList());
-                        sendChangedEvent(new RowChangedEvent(getTableName(data.getTableId()), ConnectorConstant.OPERTION_INSERT, Collections.EMPTY_LIST, after));
+                        sendChangedEvent(new RowChangedEvent(getTableName(data.getTableId()), ConnectorConstant.OPERTION_INSERT, after));
                     });
                 }
                 return;
@@ -266,7 +251,7 @@ public class MysqlExtractor extends AbstractDatabaseExtractor {
                 if (isFilterTable(data.getTableId())) {
                     data.getRows().forEach(m -> {
                         List<Object> before = Stream.of(m).collect(Collectors.toList());
-                        sendChangedEvent(new RowChangedEvent(getTableName(data.getTableId()), ConnectorConstant.OPERTION_DELETE, before, Collections.EMPTY_LIST));
+                        sendChangedEvent(new RowChangedEvent(getTableName(data.getTableId()), ConnectorConstant.OPERTION_DELETE, before));
                     });
                 }
                 return;

+ 3 - 3
dbsyncer-listener/src/main/java/org/dbsyncer/listener/oracle/dcn/DBChangeNotification.java

@@ -353,15 +353,15 @@ public class DBChangeNotification {
             List<Object> data = new ArrayList<>();
             if (event.getCode() == TableChangeDescription.TableOperation.UPDATE.getCode()) {
                 read(event.getTableName(), event.getRowId(), data);
-                listeners.forEach(listener -> listener.onEvents(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_UPDATE, Collections.EMPTY_LIST, data)));
+                listeners.forEach(listener -> listener.onEvents(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_UPDATE, data)));
 
             } else if (event.getCode() == TableChangeDescription.TableOperation.INSERT.getCode()) {
                 read(event.getTableName(), event.getRowId(), data);
-                listeners.forEach(listener -> listener.onEvents(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_INSERT, Collections.EMPTY_LIST, data)));
+                listeners.forEach(listener -> listener.onEvents(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_INSERT, data)));
 
             } else {
                 data.add(event.getRowId());
-                listeners.forEach(listener -> listener.onEvents(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_DELETE, data, Collections.EMPTY_LIST)));
+                listeners.forEach(listener -> listener.onEvents(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_DELETE, data)));
             }
         }
     }

+ 3 - 4
dbsyncer-listener/src/main/java/org/dbsyncer/listener/postgresql/AbstractMessageDecoder.java

@@ -1,7 +1,6 @@
 package org.dbsyncer.listener.postgresql;
 
 import org.dbsyncer.connector.config.DatabaseConfig;
-import org.dbsyncer.listener.postgresql.column.ColumnValue;
 import org.dbsyncer.listener.postgresql.column.PgColumnValue;
 import org.dbsyncer.listener.postgresql.enums.MessageTypeEnum;
 import org.postgresql.replication.LogSequenceNumber;
@@ -20,7 +19,7 @@ public abstract class AbstractMessageDecoder implements MessageDecoder {
 
     protected DatabaseConfig config;
 
-    private ColumnValue value = new PgColumnValue();
+    private static final PgColumnValue value = new PgColumnValue();
 
     @Override
     public boolean skipMessage(ByteBuffer buffer, LogSequenceNumber startLsn, LogSequenceNumber lastReceiveLsn) {
@@ -65,7 +64,7 @@ public abstract class AbstractMessageDecoder implements MessageDecoder {
     }
 
     /**
-     * Resolve the value of a {@link ColumnValue}.
+     * Resolve value
      *
      * @param typeName
      * @param columnValue
@@ -112,7 +111,7 @@ public abstract class AbstractMessageDecoder implements MessageDecoder {
 
             case "numeric":
             case "decimal":
-                return value.asDecimal();
+                return value.asBigDecimal();
 
             case "character":
             case "char":

+ 0 - 182
dbsyncer-listener/src/main/java/org/dbsyncer/listener/postgresql/column/AbstractColumnValue.java

@@ -1,182 +0,0 @@
-package org.dbsyncer.listener.postgresql.column;
-
-import org.dbsyncer.common.util.DateFormatUtil;
-import org.dbsyncer.listener.ListenerException;
-import org.postgresql.PGStatement;
-import org.postgresql.geometric.*;
-import org.postgresql.util.PGInterval;
-import org.postgresql.util.PGmoney;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.sql.SQLException;
-import java.sql.Date;
-import java.sql.Timestamp;
-import java.time.Instant;
-import java.time.OffsetDateTime;
-import java.time.OffsetTime;
-import java.time.ZoneOffset;
-import java.util.concurrent.TimeUnit;
-
-public abstract class AbstractColumnValue implements ColumnValue {
-
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
-    @Override
-    public Date asDate() {
-        return DateFormatUtil.stringToDate(asString());
-    }
-
-    @Override
-    public Object asTime() {
-        return asString();
-    }
-
-    @Override
-    public Object asLocalTime() {
-        return DateFormatUtil.stringToLocalTime(asString());
-    }
-
-    @Override
-    public OffsetTime asOffsetTimeUtc() {
-        return DateFormatUtil.timeWithTimeZone(asString());
-    }
-
-    @Override
-    public OffsetDateTime asOffsetDateTimeAtUtc() {
-        if ("infinity".equals(asString())) {
-            return OffsetDateTime.ofInstant(toInstantFromMillis(PGStatement.DATE_POSITIVE_INFINITY), ZoneOffset.UTC);
-        } else if ("-infinity".equals(asString())) {
-            return OffsetDateTime.ofInstant(toInstantFromMillis(PGStatement.DATE_NEGATIVE_INFINITY), ZoneOffset.UTC);
-        }
-        return DateFormatUtil.timestampWithTimeZoneToOffsetDateTime(asString());
-    }
-
-    @Override
-    public Timestamp asTimestamp() {
-        if ("infinity".equals(asString())) {
-            return Timestamp.from(toInstantFromMicros(PGStatement.DATE_POSITIVE_INFINITY));
-        } else if ("-infinity".equals(asString())) {
-            return Timestamp.from(toInstantFromMicros(PGStatement.DATE_NEGATIVE_INFINITY));
-        }
-        return DateFormatUtil.stringToTimestamp(asString());
-    }
-
-    @Override
-    public PGbox asBox() {
-        try {
-            return new PGbox(asString());
-        } catch (final SQLException e) {
-            logger.error("Failed to parse point {}, {}", asString(), e);
-            throw new ListenerException(e);
-        }
-    }
-
-    @Override
-    public PGcircle asCircle() {
-        try {
-            return new PGcircle(asString());
-        } catch (final SQLException e) {
-            logger.error("Failed to parse circle {}, {}", asString(), e);
-            throw new ListenerException(e);
-        }
-    }
-
-    @Override
-    public Object asInterval() {
-        try {
-            return new PGInterval(asString());
-        } catch (final SQLException e) {
-            logger.error("Failed to parse point {}, {}", asString(), e);
-            throw new ListenerException(e);
-        }
-    }
-
-    @Override
-    public PGline asLine() {
-        try {
-            return new PGline(asString());
-        } catch (final SQLException e) {
-            logger.error("Failed to parse point {}, {}", asString(), e);
-            throw new ListenerException(e);
-        }
-    }
-
-    @Override
-    public PGlseg asLseg() {
-        try {
-            return new PGlseg(asString());
-        } catch (final SQLException e) {
-            logger.error("Failed to parse point {}, {}", asString(), e);
-            throw new ListenerException(e);
-        }
-    }
-
-    @Override
-    public PGmoney asMoney() {
-        try {
-            final String value = asString();
-            if (value != null && value.startsWith("-")) {
-                final String negativeMoney = "(" + value.substring(1) + ")";
-                return new PGmoney(negativeMoney);
-            }
-            return new PGmoney(asString());
-        } catch (final SQLException e) {
-            logger.error("Failed to parse money {}, {}", asString(), e);
-            throw new ListenerException(e);
-        }
-    }
-
-    @Override
-    public PGpath asPath() {
-        try {
-            return new PGpath(asString());
-        } catch (final SQLException e) {
-            logger.error("Failed to parse point {}, {}", asString(), e);
-            throw new ListenerException(e);
-        }
-    }
-
-    @Override
-    public PGpoint asPoint() {
-        try {
-            return new PGpoint(asString());
-        } catch (final SQLException e) {
-            logger.error("Failed to parse point {}, {}", asString(), e);
-            throw new ListenerException(e);
-        }
-    }
-
-    @Override
-    public PGpolygon asPolygon() {
-        try {
-            return new PGpolygon(asString());
-        } catch (final SQLException e) {
-            logger.error("Failed to parse point {}, {}", asString(), e);
-            throw new ListenerException(e);
-        }
-    }
-
-    @Override
-    public boolean isArray() {
-        return false;
-    }
-
-    @Override
-    public Object asArray() {
-        return null;
-    }
-
-    private Instant toInstantFromMicros(long microsSinceEpoch) {
-        return Instant.ofEpochSecond(
-                TimeUnit.MICROSECONDS.toSeconds(microsSinceEpoch),
-                TimeUnit.MICROSECONDS.toNanos(microsSinceEpoch % TimeUnit.SECONDS.toMicros(1)));
-    }
-
-    private Instant toInstantFromMillis(long millisecondSinceEpoch) {
-        return Instant.ofEpochSecond(
-                TimeUnit.MILLISECONDS.toSeconds(millisecondSinceEpoch),
-                TimeUnit.MILLISECONDS.toNanos(millisecondSinceEpoch % TimeUnit.SECONDS.toMillis(1)));
-    }
-
-}

+ 0 - 73
dbsyncer-listener/src/main/java/org/dbsyncer/listener/postgresql/column/ColumnValue.java

@@ -1,73 +0,0 @@
-package org.dbsyncer.listener.postgresql.column;
-
-import org.postgresql.geometric.*;
-import org.postgresql.util.PGmoney;
-
-import java.sql.Date;
-import java.sql.Timestamp;
-import java.time.OffsetDateTime;
-import java.time.OffsetTime;
-
-/**
- * @author AE86
- * @version 1.0.0
- * @date 2022/4/22 22:39
- * @see org.postgresql.jdbc.TypeInfoCache
- */
-public interface ColumnValue {
-
-    void setValue(String value);
-
-    boolean isNull();
-
-    String asString();
-
-    Boolean asBoolean();
-
-    Integer asInteger();
-
-    Long asLong();
-
-    Float asFloat();
-
-    Double asDouble();
-
-    Object asDecimal();
-
-    Date asDate();
-
-    OffsetDateTime asOffsetDateTimeAtUtc();
-
-    Timestamp asTimestamp();
-
-    Object asTime();
-
-    Object asLocalTime();
-
-    OffsetTime asOffsetTimeUtc();
-
-    byte[] asByteArray();
-
-    PGbox asBox();
-
-    PGcircle asCircle();
-
-    Object asInterval();
-
-    PGline asLine();
-
-    Object asLseg();
-
-    PGmoney asMoney();
-
-    PGpath asPath();
-
-    PGpoint asPoint();
-
-    PGpolygon asPolygon();
-
-    boolean isArray();
-
-    Object asArray();
-
-}

+ 167 - 20
dbsyncer-listener/src/main/java/org/dbsyncer/listener/postgresql/column/PgColumnValue.java

@@ -1,59 +1,206 @@
 package org.dbsyncer.listener.postgresql.column;
 
+import org.dbsyncer.common.column.AbstractColumnValue;
+import org.dbsyncer.common.util.DateFormatUtil;
 import org.dbsyncer.common.util.StringUtil;
+import org.dbsyncer.listener.ListenerException;
+import org.postgresql.PGStatement;
+import org.postgresql.geometric.*;
+import org.postgresql.util.PGInterval;
+import org.postgresql.util.PGmoney;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.SQLException;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.time.*;
+import java.util.concurrent.TimeUnit;
 
-public final class PgColumnValue extends AbstractColumnValue {
+public final class PgColumnValue extends AbstractColumnValue<String> {
 
-    private String value;
-
-    public void setValue(String value) {
-        this.value = value;
-    }
+    private final Logger logger = LoggerFactory.getLogger(getClass());
 
     @Override
-    public boolean isNull() {
-        return value == null;
+    public String asString() {
+        return getValue();
     }
 
     @Override
-    public String asString() {
-        return value;
+    public byte[] asByteArray() {
+        return StringUtil.hexStringToByteArray(getValue().substring(2));
     }
 
     @Override
-    public Boolean asBoolean() {
-        return "t".equalsIgnoreCase(value);
+    public Short asShort() {
+        return Short.valueOf(getValue());
     }
 
     @Override
     public Integer asInteger() {
-        return Integer.valueOf(value);
+        return Integer.valueOf(getValue());
     }
 
     @Override
     public Long asLong() {
-        return Long.valueOf(value);
+        return Long.valueOf(getValue());
     }
 
     @Override
     public Float asFloat() {
-        return Float.valueOf(value);
+        return Float.valueOf(getValue());
     }
 
     @Override
     public Double asDouble() {
-        return Double.valueOf(value);
+        return Double.valueOf(getValue());
     }
 
     @Override
-    public Object asDecimal() {
-        return new BigDecimal(value);
+    public Boolean asBoolean() {
+        return "t".equalsIgnoreCase(getValue());
     }
 
     @Override
-    public byte[] asByteArray() {
-        return StringUtil.hexStringToByteArray(value.substring(2));
+    public BigDecimal asBigDecimal() {
+        return new BigDecimal(getValue());
+    }
+
+    @Override
+    public Date asDate() {
+        return DateFormatUtil.stringToDate(asString());
+    }
+
+    @Override
+    public Timestamp asTimestamp() {
+        if ("infinity".equals(asString())) {
+            return Timestamp.from(toInstantFromMicros(PGStatement.DATE_POSITIVE_INFINITY));
+        } else if ("-infinity".equals(asString())) {
+            return Timestamp.from(toInstantFromMicros(PGStatement.DATE_NEGATIVE_INFINITY));
+        }
+        return DateFormatUtil.stringToTimestamp(asString());
+    }
+
+    @Override
+    public Time asTime() {
+        return Time.valueOf(getValue());
+    }
+
+    public LocalTime asLocalTime() {
+        return DateFormatUtil.stringToLocalTime(asString());
+    }
+
+    public OffsetTime asOffsetTimeUtc() {
+        return DateFormatUtil.timeWithTimeZone(asString());
+    }
+
+    public OffsetDateTime asOffsetDateTimeAtUtc() {
+        if ("infinity".equals(asString())) {
+            return OffsetDateTime.ofInstant(toInstantFromMillis(PGStatement.DATE_POSITIVE_INFINITY), ZoneOffset.UTC);
+        } else if ("-infinity".equals(asString())) {
+            return OffsetDateTime.ofInstant(toInstantFromMillis(PGStatement.DATE_NEGATIVE_INFINITY), ZoneOffset.UTC);
+        }
+        return DateFormatUtil.timestampWithTimeZoneToOffsetDateTime(asString());
     }
+
+    public PGbox asBox() {
+        try {
+            return new PGbox(asString());
+        } catch (final SQLException e) {
+            logger.error("Failed to parse point {}, {}", asString(), e);
+            throw new ListenerException(e);
+        }
+    }
+
+    public PGcircle asCircle() {
+        try {
+            return new PGcircle(asString());
+        } catch (final SQLException e) {
+            logger.error("Failed to parse circle {}, {}", asString(), e);
+            throw new ListenerException(e);
+        }
+    }
+
+    public Object asInterval() {
+        try {
+            return new PGInterval(asString());
+        } catch (final SQLException e) {
+            logger.error("Failed to parse point {}, {}", asString(), e);
+            throw new ListenerException(e);
+        }
+    }
+
+    public PGline asLine() {
+        try {
+            return new PGline(asString());
+        } catch (final SQLException e) {
+            logger.error("Failed to parse point {}, {}", asString(), e);
+            throw new ListenerException(e);
+        }
+    }
+
+    public PGlseg asLseg() {
+        try {
+            return new PGlseg(asString());
+        } catch (final SQLException e) {
+            logger.error("Failed to parse point {}, {}", asString(), e);
+            throw new ListenerException(e);
+        }
+    }
+
+    public PGmoney asMoney() {
+        try {
+            final String value = asString();
+            if (value != null && value.startsWith("-")) {
+                final String negativeMoney = "(" + value.substring(1) + ")";
+                return new PGmoney(negativeMoney);
+            }
+            return new PGmoney(asString());
+        } catch (final SQLException e) {
+            logger.error("Failed to parse money {}, {}", asString(), e);
+            throw new ListenerException(e);
+        }
+    }
+
+    public PGpath asPath() {
+        try {
+            return new PGpath(asString());
+        } catch (final SQLException e) {
+            logger.error("Failed to parse point {}, {}", asString(), e);
+            throw new ListenerException(e);
+        }
+    }
+
+    public PGpoint asPoint() {
+        try {
+            return new PGpoint(asString());
+        } catch (final SQLException e) {
+            logger.error("Failed to parse point {}, {}", asString(), e);
+            throw new ListenerException(e);
+        }
+    }
+
+    public PGpolygon asPolygon() {
+        try {
+            return new PGpolygon(asString());
+        } catch (final SQLException e) {
+            logger.error("Failed to parse point {}, {}", asString(), e);
+            throw new ListenerException(e);
+        }
+    }
+
+    private Instant toInstantFromMicros(long microsSinceEpoch) {
+        return Instant.ofEpochSecond(
+                TimeUnit.MICROSECONDS.toSeconds(microsSinceEpoch),
+                TimeUnit.MICROSECONDS.toNanos(microsSinceEpoch % TimeUnit.SECONDS.toMicros(1)));
+    }
+
+    private Instant toInstantFromMillis(long millisecondSinceEpoch) {
+        return Instant.ofEpochSecond(
+                TimeUnit.MILLISECONDS.toSeconds(millisecondSinceEpoch),
+                TimeUnit.MILLISECONDS.toNanos(millisecondSinceEpoch % TimeUnit.SECONDS.toMillis(1)));
+    }
+
 }

+ 4 - 6
dbsyncer-listener/src/main/java/org/dbsyncer/listener/postgresql/decoder/PgOutputMessageDecoder.java

@@ -29,7 +29,7 @@ public class PgOutputMessageDecoder extends AbstractMessageDecoder {
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
     private static final LocalDateTime PG_EPOCH = LocalDateTime.of(2000, 1, 1, 0, 0, 0);
-    private static final String GET_TABLE_SCHEMA = "select oid,relname as tableName from pg_class t inner join (select ns.oid as nspoid, ns.nspname from pg_namespace ns where ns.nspname = (select (current_schemas(false))[s.r] from generate_series(1, array_upper(current_schemas(false), 1)) as s(r))) as n on n.nspoid = t.relnamespace where relkind = 'r'";
+    private static final String GET_TABLE_SCHEMA = "select oid,relname as tableName from pg_class t inner join (select ns.oid as nspoid, ns.nspname from pg_namespace ns where ns.nspname = '%s') as n on n.nspoid = t.relnamespace where relkind = 'r'";
     private static final Map<Integer, TableId> tables = new LinkedHashMap<>();
     private ConnectorFactory connectorFactory;
     private DatabaseConnectorMapper connectorMapper;
@@ -114,7 +114,8 @@ public class PgOutputMessageDecoder extends AbstractMessageDecoder {
     }
 
     private void readSchema() {
-        List<Map> schemas = connectorMapper.execute(databaseTemplate -> databaseTemplate.queryForList(GET_TABLE_SCHEMA));
+        final String querySchema = String.format(GET_TABLE_SCHEMA, config.getSchema());
+        List<Map> schemas = connectorMapper.execute(databaseTemplate -> databaseTemplate.queryForList(querySchema));
         if (!CollectionUtils.isEmpty(schemas)) {
             schemas.forEach(map -> {
                 Long oid = (Long) map.get("oid");
@@ -137,10 +138,7 @@ public class PgOutputMessageDecoder extends AbstractMessageDecoder {
                 case "O":
                     List<Object> data = new ArrayList<>();
                     readTupleData(tableId, buffer, data);
-                    if (MessageTypeEnum.DELETE == type) {
-                        return new RowChangedEvent(tableId.tableName, type.name(), data, Collections.EMPTY_LIST);
-                    }
-                    return new RowChangedEvent(tableId.tableName, type.name(), Collections.EMPTY_LIST, data);
+                    return new RowChangedEvent(tableId.tableName, type.name(), data);
 
                 default:
                     logger.info("N, K, O not set, got instead {}", newTuple);

+ 1 - 4
dbsyncer-listener/src/main/java/org/dbsyncer/listener/postgresql/decoder/TestDecodingMessageDecoder.java

@@ -81,11 +81,8 @@ public class TestDecodingMessageDecoder extends AbstractMessageDecoder {
         switch (eventType) {
             case ConnectorConstant.OPERTION_UPDATE:
             case ConnectorConstant.OPERTION_INSERT:
-                event = new RowChangedEvent(table, eventType, Collections.EMPTY_LIST, data);
-                break;
-
             case ConnectorConstant.OPERTION_DELETE:
-                event = new RowChangedEvent(table, eventType, data, Collections.EMPTY_LIST);
+                event = new RowChangedEvent(table, eventType, data);
                 break;
 
             default:

+ 4 - 5
dbsyncer-listener/src/main/java/org/dbsyncer/listener/quartz/AbstractQuartzExtractor.java

@@ -13,7 +13,6 @@ import org.dbsyncer.listener.AbstractExtractor;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -113,21 +112,21 @@ public abstract class AbstractQuartzExtractor extends AbstractExtractor implemen
             Object event = null;
             for (Map<String, Object> row : data) {
                 if(StringUtil.isBlank(eventFieldName)){
-                    changedEvent(new RowChangedEvent(index, ConnectorConstant.OPERTION_UPDATE, Collections.EMPTY_MAP, row));
+                    changedEvent(new RowChangedEvent(index, ConnectorConstant.OPERTION_UPDATE, row));
                     continue;
                 }
 
                 event = row.get(eventFieldName);
                 if (update.contains(event)) {
-                    changedEvent(new RowChangedEvent(index, ConnectorConstant.OPERTION_UPDATE, Collections.EMPTY_MAP, row));
+                    changedEvent(new RowChangedEvent(index, ConnectorConstant.OPERTION_UPDATE, row));
                     continue;
                 }
                 if (insert.contains(event)) {
-                    changedEvent(new RowChangedEvent(index, ConnectorConstant.OPERTION_INSERT, Collections.EMPTY_MAP, row));
+                    changedEvent(new RowChangedEvent(index, ConnectorConstant.OPERTION_INSERT, row));
                     continue;
                 }
                 if (delete.contains(event)) {
-                    changedEvent(new RowChangedEvent(index, ConnectorConstant.OPERTION_DELETE, row, Collections.EMPTY_MAP));
+                    changedEvent(new RowChangedEvent(index, ConnectorConstant.OPERTION_DELETE, row));
                     continue;
                 }
 

+ 17 - 21
dbsyncer-listener/src/main/java/org/dbsyncer/listener/sqlserver/SqlServerExtractor.java

@@ -32,25 +32,24 @@ public class SqlServerExtractor extends AbstractDatabaseExtractor {
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
     private static final String STATEMENTS_PLACEHOLDER = "#";
-    private static final String GET_DATABASE_NAME = "SELECT db_name()";
-    private static final String GET_TABLE_LIST = "SELECT NAME FROM SYS.TABLES WHERE SCHEMA_ID = SCHEMA_ID('#') AND IS_MS_SHIPPED = 0";
-    private static final String IS_SERVER_AGENT_RUNNING = "EXEC master.#.xp_servicecontrol N'QUERYSTATE', N'SQLSERVERAGENT'";
-    private static final String IS_DB_CDC_ENABLED = "SELECT is_cdc_enabled FROM sys.databases WHERE name = '#'";
-    private static final String IS_TABLE_CDC_ENABLED = "SELECT COUNT(*) FROM sys.tables tb WHERE tb.is_tracked_by_cdc = 1 AND tb.name='#'";
-    private static final String ENABLE_DB_CDC = "IF EXISTS(select 1 from sys.databases where name = '#' AND is_cdc_enabled=0) EXEC sys.sp_cdc_enable_db";
-    private static final String ENABLE_TABLE_CDC = "IF EXISTS(select 1 from sys.tables where name = '#' AND is_tracked_by_cdc=0) EXEC sys.sp_cdc_enable_table @source_schema = N'%s', @source_name = N'#', @role_name = NULL, @supports_net_changes = 0";
+    private static final String GET_DATABASE_NAME = "select db_name()";
+    private static final String GET_TABLE_LIST = "select name from sys.tables where schema_id = schema_id('#') and is_ms_shipped = 0";
+    private static final String IS_DB_CDC_ENABLED = "select is_cdc_enabled from sys.databases where name = '#'";
+    private static final String IS_TABLE_CDC_ENABLED = "select count(*) from sys.tables tb where tb.is_tracked_by_cdc = 1 and tb.name='#'";
+    private static final String ENABLE_DB_CDC = "IF EXISTS(select 1 from sys.databases where name = '#' and is_cdc_enabled=0) EXEC sys.sp_cdc_enable_db";
+    private static final String ENABLE_TABLE_CDC = "IF EXISTS(select 1 from sys.tables where name = '#' and is_tracked_by_cdc=0) EXEC sys.sp_cdc_enable_table @source_schema = N'%s', @source_name = N'#', @role_name = NULL, @supports_net_changes = 0";
     private static final String GET_TABLES_CDC_ENABLED = "EXEC sys.sp_cdc_help_change_data_capture";
-    private static final String GET_MAX_LSN = "SELECT sys.fn_cdc_get_max_lsn()";
-    private static final String GET_MIN_LSN = "SELECT sys.fn_cdc_get_min_lsn('#')";
-    private static final String GET_INCREMENT_LSN = "SELECT sys.fn_cdc_increment_lsn(?)";
-    private static final String GET_ALL_CHANGES_FOR_TABLE = "SELECT * FROM cdc.[fn_cdc_get_all_changes_#](?, ?, N'all update old') order by [__$start_lsn] ASC, [__$seqval] ASC, [__$operation] ASC";
+    private static final String GET_MAX_LSN = "select sys.fn_cdc_get_max_lsn()";
+    private static final String GET_MIN_LSN = "select sys.fn_cdc_get_min_lsn('#')";
+    private static final String GET_INCREMENT_LSN = "select sys.fn_cdc_increment_lsn(?)";
+    private static final String GET_ALL_CHANGES_FOR_TABLE = "select * from cdc.[fn_cdc_get_all_changes_#](?, ?, N'all update old') order by [__$start_lsn] ASC, [__$seqval] ASC, [__$operation] ASC";
 
     private static final String LSN_POSITION = "position";
     private static final int OFFSET_COLUMNS = 4;
     private final Lock connectLock = new ReentrantLock();
     private volatile boolean connected;
-    private Set<String> tables;
-    private Set<SqlServerChangeTable> changeTables;
+    private static Set<String> tables;
+    private static Set<SqlServerChangeTable> changeTables;
     private DatabaseConnectorMapper connectorMapper;
     private Worker worker;
     private Lsn lastLsn;
@@ -70,9 +69,6 @@ public class SqlServerExtractor extends AbstractDatabaseExtractor {
             readTables();
             Assert.notEmpty(tables, "No tables available");
 
-            boolean enabledServerAgent = queryAndMap(IS_SERVER_AGENT_RUNNING.replace(STATEMENTS_PLACEHOLDER, schema), rs -> "Running.".equals(rs.getString(1)));
-            Assert.isTrue(enabledServerAgent, "Please ensure that the SQL Server Agent is running");
-
             enableDBCDC();
             enableTableCDC();
             readChangeTables();
@@ -250,17 +246,17 @@ public class SqlServerExtractor extends AbstractDatabaseExtractor {
         for (CDCEvent event : list) {
             int code = event.getCode();
             if (TableOperationEnum.isUpdateAfter(code)) {
-                sendChangedEvent(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_UPDATE, Collections.EMPTY_LIST, event.getRow()));
+                sendChangedEvent(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_UPDATE, event.getRow()));
                 continue;
             }
 
             if (TableOperationEnum.isInsert(code)) {
-                sendChangedEvent(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_INSERT, Collections.EMPTY_LIST, event.getRow()));
+                sendChangedEvent(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_INSERT, event.getRow()));
                 continue;
             }
 
             if (TableOperationEnum.isDelete(code)) {
-                sendChangedEvent(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_DELETE, event.getRow(), Collections.EMPTY_LIST));
+                sendChangedEvent(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_DELETE, event.getRow()));
             }
         }
     }
@@ -329,10 +325,10 @@ public class SqlServerExtractor extends AbstractDatabaseExtractor {
                         continue;
                     }
 
+                    pull(stopLsn);
+
                     lastLsn = stopLsn;
                     snapshot.put(LSN_POSITION, lastLsn.toString());
-
-                    pull(stopLsn);
                 } catch (Exception e) {
                     logger.error(e.getMessage());
                     sleepInMills(1000L);

+ 1 - 1
dbsyncer-listener/src/main/test/DBChangeNotificationTest.java

@@ -28,7 +28,7 @@ public class DBChangeNotificationTest {
 
         final DBChangeNotification dcn = new DBChangeNotification(username, password, url);
         dcn.addRowEventListener((e) ->
-            logger.info("{}触发{}, before:{}, after:{}", e.getSourceTableName(), e.getEvent(), e.getBeforeData(), e.getAfterData())
+            logger.info("{}触发{}, data:{}", e.getSourceTableName(), e.getEvent(), e.getDataList())
         );
         dcn.start();
 

+ 16 - 14
dbsyncer-manager/src/main/java/org/dbsyncer/manager/puller/IncrementPuller.java

@@ -37,10 +37,10 @@ import org.springframework.util.Assert;
 
 import javax.annotation.PostConstruct;
 import java.time.Instant;
+import java.time.LocalDateTime;
 import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Executor;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
 
@@ -176,16 +176,17 @@ public class IncrementPuller extends AbstractPuller implements ScheduledTaskJob
     }
 
     abstract class AbstractListener implements Event {
+        private static final int FLUSH_DELAYED_SECONDS = 30;
         protected Mapping mapping;
         protected String metaId;
-        protected AtomicBoolean changed = new AtomicBoolean();
+        private LocalDateTime updateTime = LocalDateTime.now();
 
         @Override
         public void flushEvent(Map<String, String> map) {
-            // 如果有变更,执行更新
-            if (changed.compareAndSet(true, false)) {
+            // 30s内更新,执行写入
+            if (updateTime.isAfter(LocalDateTime.now().minusSeconds(FLUSH_DELAYED_SECONDS))) {
                 if (!CollectionUtils.isEmpty(map)) {
-                    logger.info("{}", map);
+                    logger.debug("{}", map);
                 }
                 forceFlushEvent(map);
             }
@@ -200,6 +201,11 @@ public class IncrementPuller extends AbstractPuller implements ScheduledTaskJob
             }
         }
 
+        @Override
+        public void refreshFlushEventUpdateTime() {
+            updateTime = LocalDateTime.now();
+        }
+
         @Override
         public void errorEvent(Exception e) {
             logService.log(LogType.TableGroupLog.INCREMENT_FAILED, e.getMessage());
@@ -245,13 +251,12 @@ public class IncrementPuller extends AbstractPuller implements ScheduledTaskJob
             final FieldPicker picker = tablePicker.get(rowChangedEvent.getTableGroupIndex());
             TableGroup tableGroup = picker.getTableGroup();
             rowChangedEvent.setSourceTableName(tableGroup.getSourceTable().getName());
-            rowChangedEvent.setTargetTableName(tableGroup.getTargetTable().getName());
 
             // 处理过程有异常向上抛
             parser.execute(mapping, tableGroup, rowChangedEvent);
 
             // 标记有变更记录
-            changed.compareAndSet(false, true);
+            refreshFlushEventUpdateTime();
         }
     }
 
@@ -304,17 +309,14 @@ public class IncrementPuller extends AbstractPuller implements ScheduledTaskJob
             List<FieldPicker> pickers = tablePicker.get(rowChangedEvent.getSourceTableName());
             if (!CollectionUtils.isEmpty(pickers)) {
                 pickers.forEach(picker -> {
-                    final Map<String, Object> before = picker.getColumns(rowChangedEvent.getBeforeData());
-                    final Map<String, Object> after = picker.getColumns(rowChangedEvent.getAfterData());
-                    if (picker.filter(StringUtil.equals(ConnectorConstant.OPERTION_DELETE, rowChangedEvent.getEvent()) ? before : after)) {
-                        rowChangedEvent.setBefore(before);
-                        rowChangedEvent.setAfter(after);
-                        rowChangedEvent.setTargetTableName(picker.getTableGroup().getTargetTable().getName());
+                    final Map<String, Object> dataMap = picker.getColumns(rowChangedEvent.getDataList());
+                    if (picker.filter(dataMap)) {
+                        rowChangedEvent.setDataMap(dataMap);
                         parser.execute(mapping, picker.getTableGroup(), rowChangedEvent);
                     }
                 });
                 // 标记有变更记录
-                changed.compareAndSet(false, true);
+                refreshFlushEventUpdateTime();
                 eventCounter.set(0);
                 return;
             }

+ 1 - 1
dbsyncer-monitor/src/main/java/org/dbsyncer/monitor/Monitor.java

@@ -33,7 +33,7 @@ public interface Monitor {
 
     List<MetricEnum> getMetricEnumAll();
 
-    List<MetricResponse> getThreadPoolInfo();
+    List<MetricResponse> getMetricInfo();
 
     AppReportMetric getAppReportMetric();
 

+ 21 - 17
dbsyncer-monitor/src/main/java/org/dbsyncer/monitor/MonitorFactory.java

@@ -1,6 +1,5 @@
 package org.dbsyncer.monitor;
 
-import org.dbsyncer.common.config.ThreadPoolConfig;
 import org.dbsyncer.common.model.Paging;
 import org.dbsyncer.common.util.CollectionUtils;
 import org.dbsyncer.common.util.StringUtil;
@@ -8,10 +7,12 @@ import org.dbsyncer.connector.constant.ConnectorConstant;
 import org.dbsyncer.manager.Manager;
 import org.dbsyncer.monitor.enums.MetricEnum;
 import org.dbsyncer.monitor.enums.StatisticEnum;
+import org.dbsyncer.monitor.enums.TaskMetricEnum;
 import org.dbsyncer.monitor.enums.ThreadPoolMetricEnum;
 import org.dbsyncer.monitor.model.AppReportMetric;
 import org.dbsyncer.monitor.model.MetricResponse;
 import org.dbsyncer.monitor.model.Sample;
+import org.dbsyncer.parser.flush.BufferActuator;
 import org.dbsyncer.parser.model.Mapping;
 import org.dbsyncer.parser.model.Meta;
 import org.dbsyncer.storage.constant.ConfigConstant;
@@ -25,7 +26,6 @@ import org.springframework.stereotype.Component;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.Executor;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.atomic.AtomicLong;
@@ -46,7 +46,10 @@ public class MonitorFactory implements Monitor {
     private Executor taskExecutor;
 
     @Autowired
-    private ThreadPoolConfig threadPoolConfig;
+    private BufferActuator writerBufferActuator;
+
+    @Autowired
+    private BufferActuator storageBufferActuator;
 
     @Override
     public Mapping getMapping(String mappingId) {
@@ -105,16 +108,18 @@ public class MonitorFactory implements Monitor {
     }
 
     @Override
-    public List<MetricResponse> getThreadPoolInfo() {
+    public List<MetricResponse> getMetricInfo() {
         ThreadPoolTaskExecutor threadTask = (ThreadPoolTaskExecutor) taskExecutor;
         ThreadPoolExecutor pool = threadTask.getThreadPoolExecutor();
 
         List<MetricResponse> list = new ArrayList<>();
-        list.add(createMetricResponse(ThreadPoolMetricEnum.TASK_SUBMITTED, pool.getTaskCount()));
-        list.add(createMetricResponse(ThreadPoolMetricEnum.QUEUE_UP, pool.getQueue().size()));
-        list.add(createMetricResponse(ThreadPoolMetricEnum.ACTIVE, pool.getActiveCount()));
-        list.add(createMetricResponse(ThreadPoolMetricEnum.COMPLETED, pool.getCompletedTaskCount()));
-        list.add(createMetricResponse(ThreadPoolMetricEnum.REMAINING_CAPACITY, pool.getQueue().remainingCapacity()));
+        list.add(createTaskMetricResponse(TaskMetricEnum.STORAGE_ACTIVE, storageBufferActuator.getQueue().size()));
+        list.add(createTaskMetricResponse(TaskMetricEnum.STORAGE_REMAINING_CAPACITY, storageBufferActuator.getQueueCapacity() - storageBufferActuator.getQueue().size()));
+        list.add(createThreadPoolMetricResponse(ThreadPoolMetricEnum.TASK_SUBMITTED, pool.getTaskCount()));
+        list.add(createThreadPoolMetricResponse(ThreadPoolMetricEnum.QUEUE_UP, pool.getQueue().size()));
+        list.add(createThreadPoolMetricResponse(ThreadPoolMetricEnum.ACTIVE, pool.getActiveCount()));
+        list.add(createThreadPoolMetricResponse(ThreadPoolMetricEnum.COMPLETED, pool.getCompletedTaskCount()));
+        list.add(createThreadPoolMetricResponse(ThreadPoolMetricEnum.REMAINING_CAPACITY, pool.getQueue().remainingCapacity()));
         return list;
     }
 
@@ -127,13 +132,8 @@ public class MonitorFactory implements Monitor {
         report.setInsert(getMappingInsert(metaAll));
         report.setUpdate(getMappingUpdate(metaAll));
         report.setDelete(getMappingDelete(metaAll));
-
-        // 线程池使用情况
-        ThreadPoolTaskExecutor threadTask = (ThreadPoolTaskExecutor) taskExecutor;
-        ThreadPoolExecutor pool = threadTask.getThreadPoolExecutor();
-        BlockingQueue<Runnable> queue = pool.getQueue();
-        report.setQueueUp(queue.size());
-        report.setQueueCapacity(threadPoolConfig.getQueueCapacity());
+        report.setQueueUp(writerBufferActuator.getQueue().size());
+        report.setQueueCapacity(writerBufferActuator.getQueueCapacity());
         return report;
     }
 
@@ -187,7 +187,11 @@ public class MonitorFactory implements Monitor {
         return queryMappingMetricCount(metaAll, (query) -> query.addFilter(ConfigConstant.DATA_EVENT, ConnectorConstant.OPERTION_DELETE));
     }
 
-    private MetricResponse createMetricResponse(ThreadPoolMetricEnum metricEnum, Object value) {
+    private MetricResponse createThreadPoolMetricResponse(ThreadPoolMetricEnum metricEnum, Object value) {
+        return new MetricResponse(metricEnum.getCode(), metricEnum.getGroup(), metricEnum.getMetricName(), Arrays.asList(new Sample(StatisticEnum.COUNT.getTagValueRepresentation(), value)));
+    }
+
+    private MetricResponse createTaskMetricResponse(TaskMetricEnum metricEnum, Object value) {
         return new MetricResponse(metricEnum.getCode(), metricEnum.getGroup(), metricEnum.getMetricName(), Arrays.asList(new Sample(StatisticEnum.COUNT.getTagValueRepresentation(), value)));
     }
 

+ 43 - 0
dbsyncer-monitor/src/main/java/org/dbsyncer/monitor/enums/TaskMetricEnum.java

@@ -0,0 +1,43 @@
+package org.dbsyncer.monitor.enums;
+
+/**
+ * 执行任务指标
+ *
+ * @author AE86
+ * @version 1.0.0
+ * @date 2021/07/23 0:19
+ */
+public enum TaskMetricEnum {
+
+    /**
+     * 处理中
+     */
+    STORAGE_ACTIVE("parser.storage.buffer.actuator.active", "持久化", "处理中"),
+
+    /**
+     * 空闲队列
+     */
+    STORAGE_REMAINING_CAPACITY("parser.storage.buffer.actuator.capacity", "持久化", "空闲队列");
+
+    private String code;
+    private String group;
+    private String metricName;
+
+    TaskMetricEnum(String code, String group, String metricName) {
+        this.code = code;
+        this.group = group;
+        this.metricName = metricName;
+    }
+
+    public String getCode() {
+        return code;
+    }
+
+    public String getGroup() {
+        return group;
+    }
+
+    public String getMetricName() {
+        return metricName;
+    }
+}

+ 98 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/AbstractWriterBinlog.java

@@ -0,0 +1,98 @@
+package org.dbsyncer.parser;
+
+import com.google.protobuf.ByteString;
+import org.dbsyncer.cache.CacheService;
+import org.dbsyncer.common.util.CollectionUtils;
+import org.dbsyncer.connector.model.Field;
+import org.dbsyncer.parser.flush.BufferActuator;
+import org.dbsyncer.parser.model.Picker;
+import org.dbsyncer.parser.model.TableGroup;
+import org.dbsyncer.parser.model.WriterRequest;
+import org.dbsyncer.storage.binlog.AbstractBinlogRecorder;
+import org.dbsyncer.storage.binlog.proto.BinlogMap;
+import org.dbsyncer.storage.binlog.proto.BinlogMessage;
+import org.dbsyncer.storage.binlog.proto.EventEnum;
+import org.dbsyncer.storage.util.BinlogMessageUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Queue;
+
+public abstract class AbstractWriterBinlog extends AbstractBinlogRecorder<WriterRequest> {
+
+    private final Logger logger = LoggerFactory.getLogger(getClass());
+
+    @Autowired
+    private BufferActuator writerBufferActuator;
+
+    @Autowired
+    private CacheService cacheService;
+
+    protected void flush(String tableGroupId, String event, Map<String, Object> data) {
+        try {
+            BinlogMap.Builder dataBuilder = BinlogMap.newBuilder();
+            data.forEach((k, v) -> {
+                if (null != v) {
+                    ByteString bytes = BinlogMessageUtil.serializeValue(v);
+                    if (null != bytes) {
+                        dataBuilder.putRow(k, bytes);
+                    }
+                }
+            });
+
+            BinlogMessage builder = BinlogMessage.newBuilder()
+                    .setTableGroupId(tableGroupId)
+                    .setEvent(EventEnum.valueOf(event))
+                    .setData(dataBuilder.build())
+                    .build();
+            super.flush(builder);
+        } catch (Exception e) {
+            logger.error(e.getMessage());
+        }
+    }
+
+    @Override
+    protected WriterRequest deserialize(String messageId, BinlogMessage message) {
+        if (CollectionUtils.isEmpty(message.getData().getRowMap())) {
+            return null;
+        }
+
+        // 1、获取配置信息
+        final TableGroup tableGroup = cacheService.get(message.getTableGroupId(), TableGroup.class);
+
+        // 2、反序列数据
+        try {
+            final Picker picker = new Picker(tableGroup.getFieldMapping());
+            final Map<String, Field> fieldMap = picker.getTargetFieldMap();
+            Map<String, Object> data = new HashMap<>();
+            message.getData().getRowMap().forEach((k, v) -> {
+                if (fieldMap.containsKey(k)) {
+                    data.put(k, BinlogMessageUtil.deserializeValue(fieldMap.get(k).getType(), v));
+                }
+            });
+            return new WriterRequest(messageId, message.getTableGroupId(), message.getEvent().name(), data);
+        } catch (Exception e) {
+            logger.error(e.getMessage());
+        }
+        return null;
+    }
+
+    @Override
+    public String getTaskName() {
+        return "WriterBinlog";
+    }
+
+    @Override
+    public Queue getQueue() {
+        return writerBufferActuator.getQueue();
+    }
+
+    @Override
+    public int getQueueCapacity() {
+        return writerBufferActuator.getQueueCapacity();
+    }
+
+}

+ 14 - 3
dbsyncer-parser/src/main/java/org/dbsyncer/parser/ParserFactory.java

@@ -5,6 +5,7 @@ import org.dbsyncer.common.event.RowChangedEvent;
 import org.dbsyncer.common.model.Result;
 import org.dbsyncer.common.util.CollectionUtils;
 import org.dbsyncer.common.util.JsonUtil;
+import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.connector.ConnectorFactory;
 import org.dbsyncer.connector.ConnectorMapper;
 import org.dbsyncer.connector.config.CommandConfig;
@@ -298,10 +299,20 @@ public class ParserFactory implements Parser {
 
     @Override
     public void execute(Mapping mapping, TableGroup tableGroup, RowChangedEvent event) {
-        logger.info("Table[{}] {}, before:{}, after:{}", event.getSourceTableName(), event.getEvent(),
-                event.getBefore(), event.getAfter());
+        logger.debug("Table[{}] {}, data:{}", event.getSourceTableName(), event.getEvent(), event.getDataMap());
 
-        parserStrategy.execute(mapping, tableGroup, event);
+        // 1、获取映射字段
+        final Picker picker = new Picker(tableGroup.getFieldMapping());
+        final Map target = picker.pickData(event.getDataMap());
+
+        // 2、参数转换
+        ConvertUtil.convert(tableGroup.getConvert(), target);
+
+        // 3、插件转换
+        pluginFactory.convert(tableGroup.getPlugin(), event.getEvent(), event.getDataMap(), target);
+
+        // 4、处理数据
+        parserStrategy.execute(tableGroup.getId(), event.getEvent(), target);
     }
 
     /**

+ 67 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/convert/handler/StringToTimestampHandler.java

@@ -0,0 +1,67 @@
+package org.dbsyncer.parser.convert.handler;
+
+import org.dbsyncer.common.column.Lexer;
+import org.dbsyncer.common.util.DateFormatUtil;
+import org.dbsyncer.parser.convert.AbstractHandler;
+
+/**
+ * 字符串转Timestamp
+ *
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/7/12 23:04
+ */
+public class StringToTimestampHandler extends AbstractHandler {
+
+    @Override
+    public Object convert(String args, Object value) {
+        if (value instanceof String) {
+            String s = (String) value;
+            // 2020-7-12 00:00:00
+            if(s.length() < 19){
+                s = format(s);
+            }
+            value = DateFormatUtil.stringToTimestamp(s);
+        }
+        return value;
+    }
+
+    private String format(String s){
+        StringBuilder buf = new StringBuilder();
+        Lexer lexer = new Lexer(s);
+        char comma = '-';
+        // 年
+        nextToken(lexer, buf, comma);
+        // 月
+        nextToken(lexer, buf, comma);
+        // 日
+        comma = ' ';
+        nextToken(lexer, buf, comma);
+        // 时
+        comma = ':';
+        nextToken(lexer, buf, comma);
+        // 分
+        nextToken(lexer, buf, comma);
+        // 秒
+        nextToken(lexer, buf, comma, false);
+        return buf.toString();
+    }
+
+    private void nextToken(Lexer lexer, StringBuilder buf, char comma) {
+        nextToken(lexer, buf, comma, true);
+    }
+
+    private void nextToken(Lexer lexer, StringBuilder buf, char comma, boolean appendComma) {
+        buf.append(fillZero(lexer.nextToken(comma)));
+        if(appendComma){
+            buf.append(comma);
+        }
+    }
+
+    private String fillZero(String s){
+        if(s.length() < 2){
+            return String.format("%02d", Integer.parseInt(s));
+        }
+        return s;
+    }
+}

+ 6 - 6
dbsyncer-parser/src/main/java/org/dbsyncer/parser/convert/handler/DateToChineseStandardTimeHandler.java → dbsyncer-parser/src/main/java/org/dbsyncer/parser/convert/handler/TimestampToChineseStandardTimeHandler.java

@@ -3,22 +3,22 @@ package org.dbsyncer.parser.convert.handler;
 import org.dbsyncer.common.util.DateFormatUtil;
 import org.dbsyncer.parser.convert.AbstractHandler;
 
-import java.sql.Date;
+import java.sql.Timestamp;
 
 /**
- * Date转中国标准时间
+ * Timestamp转中国标准时间
  *
  * @author AE86
  * @version 1.0.0
  * @date 2021/12/20 23:04
  */
-public class DateToChineseStandardTimeHandler extends AbstractHandler {
+public class TimestampToChineseStandardTimeHandler extends AbstractHandler {
 
     @Override
     public Object convert(String args, Object value) {
-        if (value instanceof Date) {
-            Date d = (Date) value;
-            value = DateFormatUtil.dateToChineseStandardTimeString(d);
+        if (value instanceof Timestamp) {
+            Timestamp t = (Timestamp) value;
+            value = DateFormatUtil.timestampToString(t);
         }
         return value;
     }

+ 6 - 2
dbsyncer-parser/src/main/java/org/dbsyncer/parser/enums/ConvertEnum.java

@@ -30,6 +30,10 @@ public enum ConvertEnum {
      * Timestamp转Date
      */
     TIMESTAMP_TO_DATE("TIMESTAMP_TO_DATE", "Timestamp转Date", 0, new TimestampToDateHandler()),
+    /**
+     * Timestamp转中国标准时间
+     */
+    TIMESTAMP_TO_CHINESE_STANDARD_TIME("TIMESTAMP_TO_CHINESE_STANDARD_TIME", "Timestamp转yyyy-MM-dd HH:mm:ss", 0, new TimestampToChineseStandardTimeHandler()),
     /**
      * Timestamp转Long
      */
@@ -39,9 +43,9 @@ public enum ConvertEnum {
      */
     LONG_TO_TIMESTAMP("LONG_TO_TIMESTAMP", "Long转Timestamp", 0, new LongToTimestampHandler()),
     /**
-     * Date转中国标准时间
+     * String转Timestamp
      */
-    DATE_TO_CHINESE_STANDARD_TIME("DATE_TO_CHINESE_STANDARD_TIME", "Date转yyyy-MM-dd HH:mm:ss", 0, new DateToChineseStandardTimeHandler()),
+    STRING_TO_TIMESTAMP("STRING_TO_TIMESTAMP", "String转Timestamp", 0, new StringToTimestampHandler()),
     /**
      * Byte[]转String
      */

+ 14 - 25
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/AbstractBufferActuator.java

@@ -1,5 +1,6 @@
 package org.dbsyncer.parser.flush;
 
+import org.dbsyncer.common.config.BufferActuatorConfig;
 import org.dbsyncer.common.scheduled.ScheduledTaskJob;
 import org.dbsyncer.common.scheduled.ScheduledTaskService;
 import org.slf4j.Logger;
@@ -13,7 +14,6 @@ import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Queue;
 import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
@@ -34,26 +34,21 @@ public abstract class AbstractBufferActuator<Request, Response> implements Buffe
     @Autowired
     private ScheduledTaskService scheduledTaskService;
 
-    private static final int CAPACITY = 10_0000;
-
-    private static final double BUFFER_THRESHOLD = 0.75;
-
-    private static final long MAX_BATCH_COUNT = 1000L;
-
-    private static final long PERIOD = 300;
+    @Autowired
+    private BufferActuatorConfig bufferActuatorConfig;
 
-    private Queue<Request> buffer = new LinkedBlockingQueue(CAPACITY);
+    private Queue<Request> buffer;
 
     private final Lock lock = new ReentrantLock(true);
 
     private volatile boolean running;
 
-    private Class<Response> responseClazz;
+    private final Class<Response> responseClazz = (Class<Response>) ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[1];
 
     @PostConstruct
     private void init() {
-        responseClazz = (Class<Response>) ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[1];
-        scheduledTaskService.start(PERIOD, this);
+        buffer = new LinkedBlockingQueue(getQueueCapacity());
+        scheduledTaskService.start(bufferActuatorConfig.getPeriodMillisecond(), this);
     }
 
     /**
@@ -84,20 +79,14 @@ public abstract class AbstractBufferActuator<Request, Response> implements Buffe
         return buffer;
     }
 
+    @Override
+    public int getQueueCapacity() {
+        return bufferActuatorConfig.getQueueCapacity();
+    }
+
     @Override
     public void offer(BufferRequest request) {
         buffer.offer((Request) request);
-
-        // TODO 临时解决方案:生产大于消费问题,限制生产速度
-        int size = buffer.size();
-        if (size >= (CAPACITY * BUFFER_THRESHOLD)) {
-            try {
-                TimeUnit.SECONDS.sleep(30);
-                logger.warn("当前任务队列大小{}已达上限{},请稍等{}秒", size, CAPACITY, 30);
-            } catch (InterruptedException e) {
-                logger.error(e.getMessage());
-            }
-        }
     }
 
     @Override
@@ -128,7 +117,7 @@ public abstract class AbstractBufferActuator<Request, Response> implements Buffe
         if (!queue.isEmpty()) {
             AtomicLong batchCounter = new AtomicLong();
             final Map<String, BufferResponse> map = new LinkedHashMap<>();
-            while (!queue.isEmpty() && batchCounter.get() < MAX_BATCH_COUNT) {
+            while (!queue.isEmpty() && batchCounter.get() < bufferActuatorConfig.getBatchCount()) {
                 Request poll = queue.poll();
                 String key = getPartitionKey(poll);
                 if (!map.containsKey(key)) {
@@ -145,7 +134,7 @@ public abstract class AbstractBufferActuator<Request, Response> implements Buffe
                 } catch (Exception e) {
                     logger.error("[{}]异常{}", key);
                 }
-                logger.info("[{}]{}条,耗时{}秒", key, flushTask.getTaskSize(), (Instant.now().toEpochMilli() - now) / 1000);
+                logger.info("[{}]{}条,耗时{}秒", key, flushTask.getTaskSize(), (Instant.now().toEpochMilli() - now));
             });
             map.clear();
         }

+ 2 - 7
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/AbstractFlushStrategy.java

@@ -47,16 +47,11 @@ public abstract class AbstractFlushStrategy implements FlushStrategy {
     }
 
     protected void refreshTotal(String metaId, Result writer) {
-        Meta meta = getMeta(metaId);
-        meta.getFail().getAndAdd(writer.getFailData().size());
-        meta.getSuccess().getAndAdd(writer.getSuccessData().size());
-    }
-
-    protected Meta getMeta(String metaId) {
         Assert.hasText(metaId, "Meta id can not be empty.");
         Meta meta = cacheService.get(metaId, Meta.class);
         Assert.notNull(meta, "Meta can not be null.");
-        return meta;
+        meta.getFail().getAndAdd(writer.getFailData().size());
+        meta.getSuccess().getAndAdd(writer.getSuccessData().size());
     }
 
 }

+ 7 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/BufferActuator.java

@@ -16,6 +16,13 @@ public interface BufferActuator {
      */
     Queue getQueue();
 
+    /**
+     * 获取缓存队列容量
+     *
+     * @return
+     */
+    int getQueueCapacity();
+
     /**
      * 提交任务
      *

+ 5 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/StorageBufferActuator.java

@@ -19,6 +19,11 @@ public class StorageBufferActuator extends AbstractBufferActuator<StorageRequest
     @Autowired
     private StorageService storageService;
 
+    @Override
+    public int getQueueCapacity() {
+        return super.getQueueCapacity() / 4;
+    }
+
     @Override
     protected String getPartitionKey(StorageRequest bufferTask) {
         return bufferTask.getMetaId();

+ 29 - 17
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/WriterBufferActuator.java

@@ -1,23 +1,21 @@
 package org.dbsyncer.parser.flush.impl;
 
 import org.dbsyncer.cache.CacheService;
+import org.dbsyncer.common.config.BufferActuatorConfig;
 import org.dbsyncer.common.model.Result;
+import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.connector.ConnectorFactory;
 import org.dbsyncer.connector.ConnectorMapper;
 import org.dbsyncer.connector.config.ConnectorConfig;
 import org.dbsyncer.parser.ParserFactory;
 import org.dbsyncer.parser.flush.AbstractBufferActuator;
+import org.dbsyncer.parser.model.*;
 import org.dbsyncer.parser.strategy.FlushStrategy;
-import org.dbsyncer.parser.model.WriterRequest;
-import org.dbsyncer.parser.model.WriterResponse;
-import org.dbsyncer.parser.model.BatchWriter;
-import org.dbsyncer.parser.model.Connector;
+import org.dbsyncer.parser.strategy.ParserStrategy;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 import org.springframework.util.Assert;
 
-import java.util.Collections;
-
 /**
  * @author AE86
  * @version 1.0.0
@@ -35,10 +33,14 @@ public class WriterBufferActuator extends AbstractBufferActuator<WriterRequest,
     @Autowired
     private FlushStrategy flushStrategy;
 
+    @Autowired
+    private ParserStrategy parserStrategy;
+
     @Autowired
     private CacheService cacheService;
 
-    private final static int BATCH_SIZE = 100;
+    @Autowired
+    private BufferActuatorConfig bufferActuatorConfig;
 
     @Override
     protected String getPartitionKey(WriterRequest request) {
@@ -48,25 +50,35 @@ public class WriterBufferActuator extends AbstractBufferActuator<WriterRequest,
     @Override
     protected void partition(WriterRequest request, WriterResponse response) {
         response.getDataList().add(request.getRow());
+        if(StringUtil.isNotBlank(request.getMessageId())){
+            response.getMessageIds().add(request.getMessageId());
+        }
         if (response.isMerged()) {
             return;
         }
-        response.setMetaId(request.getMetaId());
-        response.setTargetConnectorId(request.getTargetConnectorId());
-        response.setSourceTableName(request.getSourceTableName());
-        response.setTargetTableName(request.getTargetTableName());
+        response.setTableGroupId(request.getTableGroupId());
         response.setEvent(request.getEvent());
-        response.setFields(Collections.unmodifiableList(request.getFields()));
-        response.setCommand(request.getCommand());
         response.setMerged(true);
     }
 
     @Override
     protected void pull(WriterResponse response) {
-        ConnectorMapper targetConnectorMapper = connectorFactory.connect(getConnectorConfig(response.getTargetConnectorId()));
-        Result result = parserFactory.writeBatch(new BatchWriter(targetConnectorMapper, response.getCommand(), response.getTargetTableName(), response.getEvent(),
-                response.getFields(), response.getDataList(), BATCH_SIZE));
-        flushStrategy.flushIncrementData(response.getMetaId(), result, response.getEvent());
+        // 1、获取配置信息
+        final TableGroup tableGroup = cacheService.get(response.getTableGroupId(), TableGroup.class);
+        final Mapping mapping = cacheService.get(tableGroup.getMappingId(), Mapping.class);
+        final String targetTableName = tableGroup.getTargetTable().getName();
+        final Picker picker = new Picker(tableGroup.getFieldMapping());
+
+        // 2、批量执行同步
+        ConnectorMapper targetConnectorMapper = connectorFactory.connect(getConnectorConfig(mapping.getTargetConnectorId()));
+        Result result = parserFactory.writeBatch(new BatchWriter(targetConnectorMapper, tableGroup.getCommand(), targetTableName, response.getEvent(),
+                picker.getTargetFields(), response.getDataList(), bufferActuatorConfig.getWriterBatchCount()));
+
+        // 3、持久化同步结果
+        flushStrategy.flushIncrementData(mapping.getMetaId(), result, response.getEvent());
+
+        // 4、消息处理完成
+        parserStrategy.complete(response.getMessageIds());
     }
 
     /**

+ 5 - 60
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/AbstractWriter.java

@@ -1,10 +1,5 @@
 package org.dbsyncer.parser.model;
 
-import org.dbsyncer.connector.model.Field;
-
-import java.util.List;
-import java.util.Map;
-
 /**
  * @author AE86
  * @version 1.0.0
@@ -12,66 +7,16 @@ import java.util.Map;
  */
 public abstract class AbstractWriter {
 
-    private String metaId;
-
-    private String targetConnectorId;
-
-    private String sourceTableName;
-
-    private String targetTableName;
-
-    private List<Field> fields;
-
-    private Map<String, String> command;
+    private String tableGroupId;
 
     private String event;
 
-    public String getMetaId() {
-        return metaId;
-    }
-
-    public void setMetaId(String metaId) {
-        this.metaId = metaId;
-    }
-
-    public String getTargetConnectorId() {
-        return targetConnectorId;
-    }
-
-    public void setTargetConnectorId(String targetConnectorId) {
-        this.targetConnectorId = targetConnectorId;
-    }
-
-    public String getSourceTableName() {
-        return sourceTableName;
-    }
-
-    public void setSourceTableName(String sourceTableName) {
-        this.sourceTableName = sourceTableName;
-    }
-
-    public String getTargetTableName() {
-        return targetTableName;
-    }
-
-    public void setTargetTableName(String targetTableName) {
-        this.targetTableName = targetTableName;
-    }
-
-    public List<Field> getFields() {
-        return fields;
-    }
-
-    public void setFields(List<Field> fields) {
-        this.fields = fields;
-    }
-
-    public Map<String, String> getCommand() {
-        return command;
+    public String getTableGroupId() {
+        return tableGroupId;
     }
 
-    public void setCommand(Map<String, String> command) {
-        this.command = command;
+    public void setTableGroupId(String tableGroupId) {
+        this.tableGroupId = tableGroupId;
     }
 
     public String getEvent() {

+ 4 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/Picker.java

@@ -7,6 +7,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 public class Picker {
 
@@ -66,4 +67,7 @@ public class Picker {
         return targetFields;
     }
 
+    public Map<String, Field> getTargetFieldMap() {
+        return targetFields.stream().collect(Collectors.toMap(Field::getName, f -> f, (k1, k2) -> k1));
+    }
 }

+ 10 - 13
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/WriterRequest.java

@@ -1,9 +1,7 @@
 package org.dbsyncer.parser.model;
 
-import org.dbsyncer.connector.model.Field;
 import org.dbsyncer.parser.flush.BufferRequest;
 
-import java.util.List;
 import java.util.Map;
 
 /**
@@ -13,24 +11,23 @@ import java.util.Map;
  */
 public class WriterRequest extends AbstractWriter implements BufferRequest {
 
-    private String tableGroupId;
+    private String messageId;
 
     private Map row;
 
-    public WriterRequest(String tableGroupId, Map row, String metaId, String targetConnectorId, String sourceTableName, String targetTableName, String event, List<Field> fields, Map<String, String> command) {
-        setMetaId(metaId);
-        setTargetConnectorId(targetConnectorId);
-        setSourceTableName(sourceTableName);
-        setTargetTableName(targetTableName);
+    public WriterRequest(String tableGroupId, String event, Map row) {
+        this(null, tableGroupId, event, row);
+    }
+
+    public WriterRequest(String messageId, String tableGroupId, String event, Map row) {
+        setTableGroupId(tableGroupId);
         setEvent(event);
-        setFields(fields);
-        setCommand(command);
-        this.tableGroupId = tableGroupId;
+        this.messageId = messageId;
         this.row = row;
     }
 
-    public String getTableGroupId() {
-        return tableGroupId;
+    public String getMessageId() {
+        return messageId;
     }
 
     public Map getRow() {

+ 3 - 2
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/WriterResponse.java

@@ -14,6 +14,7 @@ import java.util.Map;
 public class WriterResponse extends AbstractWriter implements BufferResponse {
 
     private List<Map> dataList = new LinkedList<>();
+    private List<String> messageIds = new LinkedList<>();
 
     private boolean isMerged;
 
@@ -26,8 +27,8 @@ public class WriterResponse extends AbstractWriter implements BufferResponse {
         return dataList;
     }
 
-    public void setDataList(List<Map> dataList) {
-        this.dataList = dataList;
+    public List<String> getMessageIds() {
+        return messageIds;
     }
 
     public boolean isMerged() {

+ 16 - 4
dbsyncer-parser/src/main/java/org/dbsyncer/parser/strategy/ParserStrategy.java

@@ -1,11 +1,23 @@
 package org.dbsyncer.parser.strategy;
 
-import org.dbsyncer.common.event.RowChangedEvent;
-import org.dbsyncer.parser.model.Mapping;
-import org.dbsyncer.parser.model.TableGroup;
+import java.util.List;
+import java.util.Map;
 
 public interface ParserStrategy {
 
-    void execute(Mapping mapping, TableGroup tableGroup, RowChangedEvent event);
+    /**
+     * 同步消息
+     *
+     * @param tableGroupId
+     * @param event
+     * @param data
+     */
+    void execute(String tableGroupId, String event, Map<String, Object> data);
 
+    /**
+     * 完成同步后,执行回调删除消息
+     *
+     * @param messageIds
+     */
+    void complete(List<String> messageIds);
 }

+ 7 - 53
dbsyncer-parser/src/main/java/org/dbsyncer/parser/strategy/impl/DisableWriterBufferActuatorStrategy.java

@@ -1,67 +1,21 @@
 package org.dbsyncer.parser.strategy.impl;
 
-import com.google.protobuf.ByteString;
-import org.dbsyncer.cache.CacheService;
-import org.dbsyncer.common.event.RowChangedEvent;
-import org.dbsyncer.common.util.StringUtil;
-import org.dbsyncer.connector.constant.ConnectorConstant;
-import org.dbsyncer.parser.flush.BufferActuator;
-import org.dbsyncer.parser.model.Mapping;
-import org.dbsyncer.parser.model.TableGroup;
-import org.dbsyncer.parser.model.WriterRequest;
+import org.dbsyncer.parser.AbstractWriterBinlog;
 import org.dbsyncer.parser.strategy.ParserStrategy;
-import org.dbsyncer.storage.binlog.AbstractBinlogRecorder;
-import org.dbsyncer.storage.binlog.proto.BinlogMessage;
-import org.dbsyncer.storage.binlog.proto.Data;
-import org.dbsyncer.storage.binlog.proto.EventEnum;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
 
+import java.util.List;
 import java.util.Map;
-import java.util.Queue;
 
-public final class DisableWriterBufferActuatorStrategy extends AbstractBinlogRecorder<WriterRequest> implements ParserStrategy {
-
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
-    @Autowired
-    private BufferActuator writerBufferActuator;
-
-    @Autowired
-    private CacheService cacheService;
-
-    @Override
-    public void execute(Mapping mapping, TableGroup tableGroup, RowChangedEvent event) {
-        try {
-            EventEnum eventEnum = EventEnum.valueOf(event.getEvent());
-            Map<String, Object> data = StringUtil.equals(ConnectorConstant.OPERTION_DELETE, eventEnum.name()) ? event.getBefore() : event.getAfter();
-            BinlogMessage.Builder builder = BinlogMessage.newBuilder()
-                    .setTableGroupId(tableGroup.getId())
-                    .setEvent(eventEnum);
-            data.forEach((k, v) -> {
-                if (null != v && v instanceof String) {
-                    builder.addData(Data.newBuilder().putRow(k, ByteString.copyFromUtf8((String) v)));
-                }
-            });
-            flush(builder.build());
-        } catch (Exception e) {
-            logger.error(e.getMessage());
-        }
-    }
+public final class DisableWriterBufferActuatorStrategy extends AbstractWriterBinlog implements ParserStrategy {
 
     @Override
-    protected String getTaskName() {
-        return "WriterBinlog";
+    public void execute(String tableGroupId, String event, Map<String, Object> data) {
+        super.flush(tableGroupId, event, data);
     }
 
     @Override
-    protected Queue getQueue() {
-        return writerBufferActuator.getQueue();
+    public void complete(List<String> messageIds) {
+        super.complete(messageIds);
     }
 
-    @Override
-    protected WriterRequest deserialize(BinlogMessage message) {
-        return null;
-    }
 }

+ 21 - 25
dbsyncer-parser/src/main/java/org/dbsyncer/parser/strategy/impl/EnableWriterBufferActuatorStrategy.java

@@ -1,48 +1,44 @@
 package org.dbsyncer.parser.strategy.impl;
 
-import org.dbsyncer.common.event.RowChangedEvent;
-import org.dbsyncer.common.util.StringUtil;
-import org.dbsyncer.connector.constant.ConnectorConstant;
+import org.dbsyncer.parser.AbstractWriterBinlog;
 import org.dbsyncer.parser.flush.BufferActuator;
-import org.dbsyncer.parser.model.Mapping;
-import org.dbsyncer.parser.model.Picker;
-import org.dbsyncer.parser.model.TableGroup;
 import org.dbsyncer.parser.model.WriterRequest;
 import org.dbsyncer.parser.strategy.ParserStrategy;
-import org.dbsyncer.parser.util.ConvertUtil;
-import org.dbsyncer.plugin.PluginFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.stereotype.Component;
 
+import javax.annotation.PostConstruct;
+import java.util.List;
 import java.util.Map;
 
 @Component
-@ConditionalOnProperty(value = "dbsyncer.parser.writer.buffer.actuator.enabled", havingValue = "true")
-public final class EnableWriterBufferActuatorStrategy implements ParserStrategy {
+@ConditionalOnProperty(value = "dbsyncer.parser.flush.buffer.actuator.speed.enabled", havingValue = "true")
+public final class EnableWriterBufferActuatorStrategy extends AbstractWriterBinlog implements ParserStrategy {
 
-    @Autowired
-    private PluginFactory pluginFactory;
+    private static final double BUFFER_THRESHOLD = 0.8;
 
     @Autowired
     private BufferActuator writerBufferActuator;
 
-    @Override
-    public void execute(Mapping mapping, TableGroup tableGroup, RowChangedEvent event) {
-        // 1、获取映射字段
-        final String eventName = event.getEvent();
-        Map<String, Object> data = StringUtil.equals(ConnectorConstant.OPERTION_DELETE, eventName) ? event.getBefore() : event.getAfter();
-        Picker picker = new Picker(tableGroup.getFieldMapping());
-        Map target = picker.pickData(data);
+    private static double limit;
 
-        // 2、参数转换
-        ConvertUtil.convert(tableGroup.getConvert(), target);
+    @PostConstruct
+    private void init() {
+        limit = Math.ceil(getQueueCapacity() * BUFFER_THRESHOLD);
+    }
 
-        // 3、插件转换
-        pluginFactory.convert(tableGroup.getPlugin(), eventName, data, target);
+    @Override
+    public void execute(String tableGroupId, String event, Map<String, Object> data) {
+        if (getQueue().size() >= limit) {
+            super.flush(tableGroupId, event, data);
+        }
+        writerBufferActuator.offer(new WriterRequest(tableGroupId, event, data));
+    }
 
-        // 4、写入缓冲执行器
-        writerBufferActuator.offer(new WriterRequest(tableGroup.getId(), target, mapping.getMetaId(), mapping.getTargetConnectorId(), event.getSourceTableName(), event.getTargetTableName(), eventName, picker.getTargetFields(), tableGroup.getCommand()));
+    @Override
+    public void complete(List<String> messageIds) {
+        super.complete(messageIds);
     }
 
 }

+ 8 - 2
dbsyncer-plugin/src/main/java/org/dbsyncer/plugin/PluginFactory.java

@@ -1,7 +1,10 @@
 package org.dbsyncer.plugin;
 
 import org.apache.commons.io.FileUtils;
+import org.dbsyncer.common.model.FullConvertContext;
+import org.dbsyncer.common.model.IncrementConvertContext;
 import org.dbsyncer.common.spi.ConvertService;
+import org.dbsyncer.common.spi.ProxyApplicationContext;
 import org.dbsyncer.common.util.CollectionUtils;
 import org.dbsyncer.plugin.config.Plugin;
 import org.slf4j.Logger;
@@ -45,6 +48,9 @@ public class PluginFactory {
     @Autowired
     private Map<String, ConvertService> service;
 
+    @Autowired
+    private ProxyApplicationContext applicationContextProxy;
+
     @PostConstruct
     private void init() {
         Map<String, ConvertService> unmodifiable = new LinkedHashMap<>();
@@ -92,13 +98,13 @@ public class PluginFactory {
 
     public void convert(Plugin plugin, List<Map> source, List<Map> target) {
         if (null != plugin && service.containsKey(plugin.getClassName())) {
-            service.get(plugin.getClassName()).convert(source, target);
+            service.get(plugin.getClassName()).convert(new FullConvertContext(applicationContextProxy, source, target));
         }
     }
 
     public void convert(Plugin plugin, String event, Map<String, Object> source, Map<String, Object> target) {
         if (null != plugin && service.containsKey(plugin.getClassName())) {
-            service.get(plugin.getClassName()).convert(event, source, target);
+            service.get(plugin.getClassName()).convert(new IncrementConvertContext(applicationContextProxy, event, source, target));
         }
     }
 

+ 250 - 0
dbsyncer-plugin/src/main/java/org/dbsyncer/plugin/proxy/ProxyApplicationContextImpl.java

@@ -0,0 +1,250 @@
+package org.dbsyncer.plugin.proxy;
+
+import org.dbsyncer.common.spi.ProxyApplicationContext;
+import org.springframework.beans.BeansException;
+import org.springframework.beans.factory.BeanFactory;
+import org.springframework.beans.factory.NoSuchBeanDefinitionException;
+import org.springframework.beans.factory.ObjectProvider;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.MessageSourceResolvable;
+import org.springframework.context.NoSuchMessageException;
+import org.springframework.core.ResolvableType;
+import org.springframework.core.env.Environment;
+import org.springframework.core.io.Resource;
+import org.springframework.stereotype.Component;
+
+import java.io.IOException;
+import java.lang.annotation.Annotation;
+import java.util.Locale;
+import java.util.Map;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/6/30 15:14
+ */
+@Component("proxyApplicationContext")
+public class ProxyApplicationContextImpl implements ProxyApplicationContext {
+
+    @Qualifier("webApplicationContext")
+    @Autowired
+    private ApplicationContext applicationContext;
+
+    @Override
+    public String getId() {
+        return applicationContext.getId();
+    }
+
+    @Override
+    public String getApplicationName() {
+        return applicationContext.getApplicationName();
+    }
+
+    @Override
+    public String getDisplayName() {
+        return applicationContext.getDisplayName();
+    }
+
+    @Override
+    public long getStartupDate() {
+        return applicationContext.getStartupDate();
+    }
+
+    @Override
+    public ApplicationContext getParent() {
+        return applicationContext.getParent();
+    }
+
+    @Override
+    public AutowireCapableBeanFactory getAutowireCapableBeanFactory() throws IllegalStateException {
+        return applicationContext.getAutowireCapableBeanFactory();
+    }
+
+    @Override
+    public BeanFactory getParentBeanFactory() {
+        return applicationContext.getParentBeanFactory();
+    }
+
+    @Override
+    public boolean containsLocalBean(String name) {
+        return applicationContext.containsLocalBean(name);
+    }
+
+    @Override
+    public boolean containsBeanDefinition(String beanName) {
+        return applicationContext.containsBeanDefinition(beanName);
+    }
+
+    @Override
+    public int getBeanDefinitionCount() {
+        return applicationContext.getBeanDefinitionCount();
+    }
+
+    @Override
+    public String[] getBeanDefinitionNames() {
+        return applicationContext.getBeanDefinitionNames();
+    }
+
+    @Override
+    public String[] getBeanNamesForType(ResolvableType type) {
+        return applicationContext.getBeanNamesForType(type);
+    }
+
+    @Override
+    public String[] getBeanNamesForType(ResolvableType type, boolean includeNonSingletons, boolean allowEagerInit) {
+        return applicationContext.getBeanNamesForType(type, includeNonSingletons, allowEagerInit);
+    }
+
+    @Override
+    public String[] getBeanNamesForType(Class<?> type) {
+        return applicationContext.getBeanNamesForType(type);
+    }
+
+    @Override
+    public String[] getBeanNamesForType(Class<?> type, boolean includeNonSingletons, boolean allowEagerInit) {
+        return applicationContext.getBeanNamesForType(type, includeNonSingletons, allowEagerInit);
+    }
+
+    @Override
+    public <T> Map<String, T> getBeansOfType(Class<T> type) throws BeansException {
+        return applicationContext.getBeansOfType(type);
+    }
+
+    @Override
+    public <T> Map<String, T> getBeansOfType(Class<T> type, boolean includeNonSingletons, boolean allowEagerInit) throws BeansException {
+        return applicationContext.getBeansOfType(type, includeNonSingletons, allowEagerInit);
+    }
+
+    @Override
+    public String[] getBeanNamesForAnnotation(Class<? extends Annotation> annotationType) {
+        return applicationContext.getBeanNamesForAnnotation(annotationType);
+    }
+
+    @Override
+    public Map<String, Object> getBeansWithAnnotation(Class<? extends Annotation> annotationType) throws BeansException {
+        return applicationContext.getBeansWithAnnotation(annotationType);
+    }
+
+    @Override
+    public <A extends Annotation> A findAnnotationOnBean(String beanName, Class<A> annotationType) throws NoSuchBeanDefinitionException {
+        return applicationContext.findAnnotationOnBean(beanName, annotationType);
+    }
+
+    @Override
+    public Object getBean(String name) throws BeansException {
+        return applicationContext.getBean(name);
+    }
+
+    @Override
+    public <T> T getBean(String name, Class<T> requiredType) throws BeansException {
+        return applicationContext.getBean(name, requiredType);
+    }
+
+    @Override
+    public Object getBean(String name, Object... args) throws BeansException {
+        return applicationContext.getBean(name, args);
+    }
+
+    @Override
+    public <T> T getBean(Class<T> requiredType) throws BeansException {
+        return applicationContext.getBean(requiredType);
+    }
+
+    @Override
+    public <T> T getBean(Class<T> requiredType, Object... args) throws BeansException {
+        return applicationContext.getBean(requiredType, args);
+    }
+
+    @Override
+    public <T> ObjectProvider<T> getBeanProvider(Class<T> requiredType) {
+        return applicationContext.getBeanProvider(requiredType);
+    }
+
+    @Override
+    public <T> ObjectProvider<T> getBeanProvider(ResolvableType requiredType) {
+        return applicationContext.getBeanProvider(requiredType);
+    }
+
+    @Override
+    public boolean containsBean(String name) {
+        return applicationContext.containsBean(name);
+    }
+
+    @Override
+    public boolean isSingleton(String name) throws NoSuchBeanDefinitionException {
+        return applicationContext.isSingleton(name);
+    }
+
+    @Override
+    public boolean isPrototype(String name) throws NoSuchBeanDefinitionException {
+        return applicationContext.isPrototype(name);
+    }
+
+    @Override
+    public boolean isTypeMatch(String name, ResolvableType typeToMatch) throws NoSuchBeanDefinitionException {
+        return applicationContext.isTypeMatch(name, typeToMatch);
+    }
+
+    @Override
+    public boolean isTypeMatch(String name, Class<?> typeToMatch) throws NoSuchBeanDefinitionException {
+        return applicationContext.isTypeMatch(name, typeToMatch);
+    }
+
+    @Override
+    public Class<?> getType(String name) throws NoSuchBeanDefinitionException {
+        return applicationContext.getType(name);
+    }
+
+    @Override
+    public Class<?> getType(String name, boolean allowFactoryBeanInit) throws NoSuchBeanDefinitionException {
+        return applicationContext.getType(name, allowFactoryBeanInit);
+    }
+
+    @Override
+    public String[] getAliases(String name) {
+        return applicationContext.getAliases(name);
+    }
+
+    @Override
+    public void publishEvent(Object event) {
+        applicationContext.publishEvent(event);
+    }
+
+    @Override
+    public String getMessage(String code, Object[] args, String defaultMessage, Locale locale) {
+        return applicationContext.getMessage(code, args, defaultMessage, locale);
+    }
+
+    @Override
+    public String getMessage(String code, Object[] args, Locale locale) throws NoSuchMessageException {
+        return applicationContext.getMessage(code, args, locale);
+    }
+
+    @Override
+    public String getMessage(MessageSourceResolvable resolvable, Locale locale) throws NoSuchMessageException {
+        return applicationContext.getMessage(resolvable, locale);
+    }
+
+    @Override
+    public Environment getEnvironment() {
+        return applicationContext.getEnvironment();
+    }
+
+    @Override
+    public Resource[] getResources(String locationPattern) throws IOException {
+        return applicationContext.getResources(locationPattern);
+    }
+
+    @Override
+    public Resource getResource(String location) {
+        return applicationContext.getResource(location);
+    }
+
+    @Override
+    public ClassLoader getClassLoader() {
+        return applicationContext.getClassLoader();
+    }
+}

+ 5 - 6
dbsyncer-plugin/src/main/java/org/dbsyncer/plugin/service/DemoConvertServiceImpl.java

@@ -1,15 +1,14 @@
 package org.dbsyncer.plugin.service;
 
 import org.dbsyncer.common.config.AppConfig;
+import org.dbsyncer.common.model.FullConvertContext;
+import org.dbsyncer.common.model.IncrementConvertContext;
 import org.dbsyncer.common.spi.ConvertService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
-import java.util.List;
-import java.util.Map;
-
 @Component
 public class DemoConvertServiceImpl implements ConvertService {
 
@@ -19,12 +18,12 @@ public class DemoConvertServiceImpl implements ConvertService {
     private AppConfig appConfig;
 
     @Override
-    public void convert(List<Map> source, List<Map> target) {
+    public void convert(FullConvertContext context) {
     }
 
     @Override
-    public void convert(String event, Map source, Map target) {
-        logger.info("插件正在处理同步数据,事件:{},数据:{}", event, source);
+    public void convert(IncrementConvertContext context) {
+        logger.info("插件正在处理同步数据,事件:{},数据:{}", context.getEvent(), context.getSource());
     }
 
     @Override

+ 38 - 0
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/AbstractBinlogActuator.java

@@ -0,0 +1,38 @@
+package org.dbsyncer.storage.binlog;
+
+import org.dbsyncer.storage.enums.BinlogStatusEnum;
+import org.dbsyncer.storage.model.BinlogIndex;
+
+import java.time.LocalDateTime;
+
+public abstract class AbstractBinlogActuator implements BinlogActuator {
+
+    private BinlogIndex binlogIndex;
+
+    private BinlogStatusEnum status = BinlogStatusEnum.RUNNING;
+
+    protected void initBinlogIndex(BinlogIndex binlogIndex) {
+        binlogIndex.addLock(this);
+        this.binlogIndex = binlogIndex;
+    }
+
+    protected void refreshBinlogIndexUpdateTime() {
+        binlogIndex.setUpdateTime(LocalDateTime.now());
+    }
+
+    @Override
+    public String getFileName() {
+        return binlogIndex.getFileName();
+    }
+
+    @Override
+    public boolean isRunning() {
+        return status == BinlogStatusEnum.RUNNING;
+    }
+
+    @Override
+    public void stop() {
+        this.status = BinlogStatusEnum.STOP;
+    }
+
+}

+ 177 - 59
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/AbstractBinlogRecorder.java

@@ -1,17 +1,43 @@
 package org.dbsyncer.storage.binlog;
 
+import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.util.BytesRef;
+import org.dbsyncer.common.config.BinlogRecorderConfig;
+import org.dbsyncer.common.model.Paging;
 import org.dbsyncer.common.scheduled.ScheduledTaskJob;
 import org.dbsyncer.common.scheduled.ScheduledTaskService;
+import org.dbsyncer.common.snowflake.SnowflakeIdWorker;
+import org.dbsyncer.common.util.CollectionUtils;
 import org.dbsyncer.storage.binlog.proto.BinlogMessage;
+import org.dbsyncer.storage.constant.BinlogConstant;
+import org.dbsyncer.storage.enums.IndexFieldResolverEnum;
+import org.dbsyncer.storage.lucene.Shard;
+import org.dbsyncer.storage.query.Option;
+import org.dbsyncer.storage.util.DocumentUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.DisposableBean;
 import org.springframework.beans.factory.annotation.Autowired;
 
 import javax.annotation.PostConstruct;
+import java.io.File;
 import java.io.IOException;
+import java.sql.Timestamp;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
 import java.util.Queue;
-import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
@@ -20,99 +46,191 @@ import java.util.concurrent.locks.ReentrantLock;
  * @version 1.0.0
  * @date 2022/6/8 0:53
  */
-public abstract class AbstractBinlogRecorder<Message> implements BinlogRecorder, ScheduledTaskJob, DisposableBean {
+public abstract class AbstractBinlogRecorder<Message> implements BinlogRecorder, DisposableBean {
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
+    private static final String PATH = new StringBuilder(System.getProperty("user.dir")).append(File.separatorChar).append("data").append(File.separatorChar).append("data").append(File.separatorChar).toString();
+
     @Autowired
     private ScheduledTaskService scheduledTaskService;
 
-    private static final long MAX_BATCH_COUNT = 100L;
+    @Autowired
+    private SnowflakeIdWorker snowflakeIdWorker;
 
-    private static final long PERIOD = 3000;
+    @Autowired
+    private BinlogRecorderConfig binlogRecorderConfig;
 
-    private final Lock lock = new ReentrantLock(true);
+    private static Queue<BinlogMessage> queue;
 
-    private volatile boolean running;
+    private static Shard shard;
 
-    private BinlogContext context;
+    private WriterTask writerTask = new WriterTask();
+
+    private ReaderTask readerTask = new ReaderTask();
 
     @PostConstruct
     private void init() throws IOException {
-        // /data/binlog/WriterBinlog/
-        context = new BinlogContext(getTaskName());
-        scheduledTaskService.start(PERIOD, this);
+        queue = new LinkedBlockingQueue(binlogRecorderConfig.getQueueCapacity());
+        shard = new Shard(PATH + getTaskName());
+        scheduledTaskService.start(binlogRecorderConfig.getWriterPeriodMillisecond(), writerTask);
+        scheduledTaskService.start(binlogRecorderConfig.getReaderPeriodMillisecond(), readerTask);
     }
 
     /**
-     * 获取任务名称
+     * 反序列化消息
      *
+     * @param message
      * @return
      */
-    protected String getTaskName() {
-        return getClass().getSimpleName();
+    protected abstract Message deserialize(String messageId, BinlogMessage message);
+
+    @Override
+    public void flush(BinlogMessage message) {
+        queue.offer(message);
     }
 
-    /**
-     * 获取缓存队列
-     *
-     * @return
-     */
-    protected abstract Queue getQueue();
+    @Override
+    public void destroy() throws IOException {
+        shard.close();
+    }
+
+    @Override
+    public void complete(List<String> messageIds) {
+        if (!CollectionUtils.isEmpty(messageIds)) {
+            try {
+                int size = messageIds.size();
+                Term[] terms = new Term[size];
+                for (int i = 0; i < size; i++) {
+                    terms[i] = new Term(BinlogConstant.BINLOG_ID, messageIds.get(i));
+                }
+                shard.deleteBatch(terms);
+            } catch (IOException e) {
+                logger.error(e.getMessage());
+            }
+        }
+    }
 
     /**
-     * 反序列化任务
-     *
-     * @param message
-     * @return
+     * 合并缓存队列任务到磁盘
      */
-    protected abstract Message deserialize(BinlogMessage message);
+    final class WriterTask implements ScheduledTaskJob {
 
-    @Override
-    public void run() {
-        if (running || !getQueue().isEmpty()) {
-            return;
-        }
+        @Override
+        public void run() {
+            if (queue.isEmpty()) {
+                return;
+            }
 
-        final Lock binlogLock = lock;
-        boolean locked = false;
-        try {
-            locked = binlogLock.tryLock();
-            if (locked) {
-                running = true;
-                doParse();
+            List<Document> tasks = new ArrayList<>();
+            int count = 0;
+            long now = Instant.now().toEpochMilli();
+            while (!queue.isEmpty() && count < binlogRecorderConfig.getBatchCount()) {
+                BinlogMessage message = queue.poll();
+                if (null != message) {
+                    tasks.add(DocumentUtil.convertBinlog2Doc(String.valueOf(snowflakeIdWorker.nextId()), BinlogConstant.READY, new BytesRef(message.toByteArray()), now));
+                }
+                count++;
             }
-        } catch (Exception e) {
-            logger.error(e.getMessage());
-        } finally {
-            if (locked) {
-                running = false;
-                binlogLock.unlock();
+
+            if (!CollectionUtils.isEmpty(tasks)) {
+                try {
+                    shard.insertBatch(tasks);
+                } catch (IOException e) {
+                    logger.error(e.getMessage());
+                }
             }
         }
     }
 
-    @Override
-    public void flush(BinlogMessage message) throws IOException {
-        context.write(message);
-    }
+    /**
+     * 从磁盘读取日志到任务队列
+     */
+    final class ReaderTask implements ScheduledTaskJob {
 
-    @Override
-    public void destroy() {
-        context.close();
-    }
+        private final Lock lock = new ReentrantLock(true);
+
+        private volatile boolean running;
 
-    private void doParse() throws IOException {
-        byte[] line;
-        AtomicInteger batchCounter = new AtomicInteger();
-        while (batchCounter.get() < MAX_BATCH_COUNT && null != (line = context.readLine())) {
-            deserialize(BinlogMessage.parseFrom(line));
-            // getQueue().offer(deserialize(message));
-            batchCounter.getAndAdd(1);
+        @Override
+        public void run() {
+            if (running || (binlogRecorderConfig.getBatchCount() * 2) + getQueue().size() >= getQueueCapacity()) {
+                return;
+            }
+
+            final Lock binlogLock = lock;
+            boolean locked = false;
+            try {
+                locked = binlogLock.tryLock();
+                if (locked) {
+                    running = true;
+                    doParse();
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage());
+            } finally {
+                if (locked) {
+                    running = false;
+                    binlogLock.unlock();
+                }
+            }
         }
 
-        if (batchCounter.get() > 0) {
-            context.flush();
+        private void doParse() throws IOException {
+            //  查询[待处理] 或 [处理中 & 处理超时]
+            long maxProcessingSeconds = Timestamp.valueOf(LocalDateTime.now().minusSeconds(binlogRecorderConfig.getMaxProcessingSeconds())).getTime();
+            BooleanQuery query = new BooleanQuery.Builder()
+                    .add(new BooleanQuery.Builder()
+                            .add(IntPoint.newExactQuery(BinlogConstant.BINLOG_STATUS, BinlogConstant.READY), BooleanClause.Occur.MUST)
+                            .build(), BooleanClause.Occur.SHOULD)
+                    .add(new BooleanQuery.Builder()
+                            .add(IntPoint.newExactQuery(BinlogConstant.BINLOG_STATUS, BinlogConstant.PROCESSING), BooleanClause.Occur.MUST)
+                            .add(LongPoint.newRangeQuery(BinlogConstant.BINLOG_TIME, Long.MIN_VALUE, maxProcessingSeconds), BooleanClause.Occur.MUST)
+                            .build(), BooleanClause.Occur.SHOULD)
+                    .build();
+            Option option = new Option(query);
+            option.addIndexFieldResolverEnum(BinlogConstant.BINLOG_STATUS, IndexFieldResolverEnum.INT);
+            option.addIndexFieldResolverEnum(BinlogConstant.BINLOG_CONTENT, IndexFieldResolverEnum.BINARY);
+            option.addIndexFieldResolverEnum(BinlogConstant.BINLOG_TIME, IndexFieldResolverEnum.LONG);
+
+            // 优先处理最早记录
+            Sort sort = new Sort(new SortField(BinlogConstant.BINLOG_TIME, SortField.Type.LONG));
+            Paging paging = shard.query(option, 1, binlogRecorderConfig.getBatchCount(), sort);
+            if (CollectionUtils.isEmpty(paging.getData())) {
+                return;
+            }
+
+            List<Map> list = (List<Map>) paging.getData();
+            final int size = list.size();
+            final List<Message> messages = new ArrayList<>(size);
+            final List<Document> updateDocs = new ArrayList<>(size);
+            final Term[] deleteIds = new Term[size];
+            for (int i = 0; i < size; i++) {
+                Map row = list.get(i);
+                String id = (String) row.get(BinlogConstant.BINLOG_ID);
+                Integer status = (Integer) row.get(BinlogConstant.BINLOG_STATUS);
+                BytesRef ref = (BytesRef) row.get(BinlogConstant.BINLOG_CONTENT);
+                if (BinlogConstant.PROCESSING == status) {
+                    logger.warn("存在超时未处理数据,正在重试,建议优化配置参数[max-processing-seconds={}].", binlogRecorderConfig.getMaxProcessingSeconds());
+                }
+                deleteIds[i] = new Term(BinlogConstant.BINLOG_ID, id);
+                String newId = String.valueOf(snowflakeIdWorker.nextId());
+                try {
+                    Message message = deserialize(newId, BinlogMessage.parseFrom(ref.bytes));
+                    if (null != message) {
+                        messages.add(message);
+                        updateDocs.add(DocumentUtil.convertBinlog2Doc(newId, BinlogConstant.PROCESSING, ref, Instant.now().toEpochMilli()));
+                    }
+                } catch (InvalidProtocolBufferException e) {
+                    logger.error(e.getMessage());
+                }
+            }
+
+            // 如果在更新消息状态的过程中服务被中止,为保证数据的安全性,重启后消息可能会同步2次)
+            shard.insertBatch(updateDocs);
+            shard.deleteBatch(deleteIds);
+            getQueue().addAll(messages);
         }
     }
+
 }

+ 30 - 0
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/BinlogActuator.java

@@ -0,0 +1,30 @@
+package org.dbsyncer.storage.binlog;
+
+import java.io.Closeable;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/6/26 23:23
+ */
+public interface BinlogActuator extends Closeable {
+
+    /**
+     * 获取索引文件名
+     *
+     * @return
+     */
+    String getFileName();
+
+    /**
+     * 状态是否为运行中
+     *
+     * @return
+     */
+    boolean isRunning();
+
+    /**
+     * 标记为停止状态
+     */
+    void stop();
+}

+ 98 - 0
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/BinlogColumnValue.java

@@ -0,0 +1,98 @@
+package org.dbsyncer.storage.binlog;
+
+import com.google.protobuf.ByteString;
+import org.dbsyncer.common.column.AbstractColumnValue;
+
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/6/30 22:39
+ */
+public class BinlogColumnValue extends AbstractColumnValue<ByteString> {
+
+    private final ByteBuffer buffer = ByteBuffer.allocate(8);
+
+    @Override
+    public String asString() {
+        return getValue().toStringUtf8();
+    }
+
+    @Override
+    public byte[] asByteArray() {
+        return getValue().toByteArray();
+    }
+
+    @Override
+    public Short asShort() {
+        buffer.clear();
+        buffer.put(asByteArray(), 0, 2);
+        buffer.flip();
+        return buffer.asShortBuffer().get();
+    }
+
+    @Override
+    public Integer asInteger() {
+        buffer.clear();
+        buffer.put(asByteArray(), 0, 4);
+        buffer.flip();
+        return buffer.asIntBuffer().get();
+    }
+
+    @Override
+    public Long asLong() {
+        buffer.clear();
+        buffer.put(asByteArray(), 0, 8);
+        buffer.flip();
+        return buffer.asLongBuffer().get();
+    }
+
+    @Override
+    public Float asFloat() {
+        buffer.clear();
+        buffer.put(asByteArray(), 0, 4);
+        buffer.flip();
+        return buffer.asFloatBuffer().get();
+    }
+
+    @Override
+    public Double asDouble() {
+        buffer.clear();
+        buffer.put(asByteArray(), 0, 8);
+        buffer.flip();
+        return buffer.asDoubleBuffer().get();
+    }
+
+    @Override
+    public Boolean asBoolean() {
+        buffer.clear();
+        buffer.put(asByteArray(), 0, 2);
+        buffer.flip();
+        return buffer.asShortBuffer().get() == 1;
+    }
+
+    @Override
+    public BigDecimal asBigDecimal() {
+        return new BigDecimal(asString());
+    }
+
+    @Override
+    public Date asDate() {
+        return new Date(asLong());
+    }
+
+    @Override
+    public Timestamp asTimestamp() {
+        return new Timestamp(asLong());
+    }
+
+    @Override
+    public Time asTime() {
+        return new Time(asLong());
+    }
+}

+ 0 - 178
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/BinlogContext.java

@@ -1,178 +0,0 @@
-package org.dbsyncer.storage.binlog;
-
-import org.apache.commons.io.FileUtils;
-import org.dbsyncer.common.util.CollectionUtils;
-import org.dbsyncer.common.util.JsonUtil;
-import org.dbsyncer.common.util.NumberUtil;
-import org.dbsyncer.common.util.StringUtil;
-import org.dbsyncer.storage.binlog.proto.BinlogMessage;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.util.Assert;
-
-import java.io.Closeable;
-import java.io.File;
-import java.io.IOException;
-import java.nio.charset.Charset;
-import java.nio.file.Files;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.sql.Timestamp;
-import java.time.Instant;
-import java.time.LocalDateTime;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Set;
-
-// Managed the on-disk binlog directory for one task: a rolling set of data files
-// ("binlog.000001", ...), an index file listing them oldest-first, and a JSON
-// config file recording the current file and read position. Removed in this commit.
-public class BinlogContext implements Closeable {
-
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
-    // NOTE(review): never referenced in this class — size-based rotation was
-    // apparently never wired up.
-    private static final long BINLOG_MAX_SIZE = 256 * 1024 * 1024;
-
-    // Binlog files older than this many days are purged during construction.
-    private static final int BINLOG_EXPIRE_DAYS = 7;
-
-    private static final String LINE_SEPARATOR = System.lineSeparator();
-
-    // NOTE(review): platform default charset — index/config files written under
-    // one default encoding may not read back under another; UTF-8 would be safer.
-    private static final Charset DEFAULT_CHARSET = Charset.defaultCharset();
-
-    // Common stem for data files, the index file and the config file.
-    private static final String BINLOG = "binlog";
-
-    // One binlog file name per line, oldest first.
-    private static final String BINLOG_INDEX = BINLOG + ".index";
-
-    // JSON payload like {"binlog":"binlog.000001","pos":0}.
-    private static final String BINLOG_CONFIG = BINLOG + ".config";
-
-    // In-memory copy of the index file's lines.
-    private List<String> index = new LinkedList<>();
-
-    // Absolute directory {user.dir}/data/binlog/{taskName}/ with trailing separator.
-    private String path;
-
-    private File configFile;
-
-    private File indexFile;
-
-    private Binlog config;
-
-    private BinlogPipeline pipeline;
-
-    // Creates the task directory if missing, bootstraps or loads the config and
-    // index, drops expired/missing files, then opens a pipeline at the saved position.
-    public BinlogContext(String taskName) throws IOException {
-        path = new StringBuilder(System.getProperty("user.dir")).append(File.separatorChar)
-                .append("data").append(File.separatorChar)
-                .append("binlog").append(File.separatorChar)
-                .append(taskName).append(File.separatorChar)
-                .toString();
-        File dir = new File(path);
-        if (!dir.exists()) {
-            FileUtils.forceMkdir(dir);
-        }
-
-        // binlog.index
-        indexFile = new File(path + BINLOG_INDEX);
-        // binlog.config
-        configFile = new File(path + BINLOG_CONFIG);
-        if (!configFile.exists()) {
-            // binlog.000001
-            config = initBinlogConfig(createNewBinlogName(0));
-        }
-
-        // read index
-        Assert.isTrue(indexFile.exists(), String.format("The index file '%s' is not exist.", indexFile.getName()));
-        index.addAll(FileUtils.readLines(indexFile, DEFAULT_CHARSET));
-
-        // delete index file
-        deleteExpiredIndexFile();
-
-        // {"binlog":"binlog.000001","pos":0}
-        if (null == config) {
-            config = JsonUtil.jsonToObj(FileUtils.readFileToString(configFile, DEFAULT_CHARSET), Binlog.class);
-        }
-
-        // no index
-        if (CollectionUtils.isEmpty(index)) {
-            // binlog.000002
-            config = initBinlogConfig(createNewBinlogName(getBinlogIndex(config.getFileName())));
-            index.addAll(FileUtils.readLines(indexFile, DEFAULT_CHARSET));
-        }
-
-        // Config entry is stale (file no longer listed in the index): fall back
-        // to the earliest indexed binlog file and persist the corrected config.
-        int indexOf = index.indexOf(config.getFileName());
-        if (-1 == indexOf) {
-            logger.warn("The binlog file '{}' is expired.", config.getFileName());
-            config = new Binlog().setFileName(index.get(0));
-            write(configFile, JsonUtil.objToJson(config), false);
-        }
-
-        pipeline = new BinlogPipeline(new File(path + config.getFileName()), config.getPosition());
-        logger.info("BinlogContext initialized with config:{}", JsonUtil.objToJson(config));
-    }
-
-    // Resets config, index and an empty data file for a brand-new binlog name.
-    // Note: both config and index files are overwritten, not appended to.
-    private Binlog initBinlogConfig(String binlogName) throws IOException {
-        Binlog config = new Binlog().setFileName(binlogName);
-        write(configFile, JsonUtil.objToJson(config), false);
-        write(indexFile, binlogName + LINE_SEPARATOR, false);
-        write(new File(path + binlogName), "", false);
-        return config;
-    }
-
-    // Removes index entries whose files are missing, deletes files older than
-    // BINLOG_EXPIRE_DAYS, and rewrites the index when anything was dropped.
-    private void deleteExpiredIndexFile() throws IOException {
-        if (CollectionUtils.isEmpty(index)) {
-            return;
-        }
-        Set<String> shouldDelete = new HashSet<>();
-        for (String name : index) {
-            File file = new File(path + name);
-            if (!file.exists()) {
-                shouldDelete.add(name);
-                logger.info("Delete invalid binlog file '{}'.", name);
-                continue;
-            }
-            if (isExpiredFile(file)) {
-                FileUtils.forceDelete(file);
-                shouldDelete.add(name);
-                logger.info("Delete expired binlog file '{}'.", name);
-            }
-        }
-        if (!CollectionUtils.isEmpty(shouldDelete)) {
-            index.removeAll(shouldDelete);
-            StringBuilder indexBuilder = new StringBuilder();
-            index.forEach(name -> indexBuilder.append(name).append(LINE_SEPARATOR));
-            write(indexFile, indexBuilder.toString(), false);
-        }
-    }
-
-    // True when the file's creation time is older than now minus BINLOG_EXPIRE_DAYS.
-    private boolean isExpiredFile(File file) throws IOException {
-        BasicFileAttributes attr = Files.readAttributes(file.toPath(), BasicFileAttributes.class);
-        Instant instant = attr.creationTime().toInstant();
-        return Timestamp.from(instant).getTime() < Timestamp.valueOf(LocalDateTime.now().minusDays(BINLOG_EXPIRE_DAYS)).getTime();
-    }
-
-    // "binlog.%06d": index <= 0 starts the series at 000001, otherwise index + 1.
-    private String createNewBinlogName(int index) {
-        return String.format("%s.%06d", BINLOG, index <= 0 ? 1 : index + 1);
-    }
-
-    // Parses the numeric suffix out of "binlog.NNNNNN" (0 when unparsable).
-    private int getBinlogIndex(String binlogName) {
-        return NumberUtil.toInt(StringUtil.substring(binlogName, BINLOG.length() + 1));
-    }
-
-    // Persists the pipeline's current file name and offset into binlog.config.
-    public void flush() throws IOException {
-        config.setFileName(pipeline.getBinlogName());
-        config.setPosition(pipeline.getOffset());
-        write(configFile, JsonUtil.objToJson(config), false);
-    }
-
-    public byte[] readLine() throws IOException {
-        return pipeline.readLine();
-    }
-
-    public void write(BinlogMessage message) throws IOException {
-        pipeline.write(message);
-    }
-
-    // append=false at every call site above: each write replaces the file content.
-    private void write(File file, String line, boolean append) throws IOException {
-        FileUtils.write(file, line, DEFAULT_CHARSET, append);
-    }
-
-    @Override
-    public void close() {
-        pipeline.close();
-    }
-}

+ 0 - 60
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/BinlogPipeline.java

@@ -1,60 +0,0 @@
-package org.dbsyncer.storage.binlog;
-
-import org.apache.commons.io.IOUtils;
-import org.dbsyncer.common.file.BufferedRandomAccessFile;
-import org.dbsyncer.storage.binlog.proto.BinlogMessage;
-
-import java.io.*;
-
-/**
- * @author AE86
- * @version 1.0.0
- * @date 2022/6/19 23:36
- */
-// Read/append pipeline over a single binlog file: a buffered random-access
-// reader positioned at a saved offset, plus an append-only output stream.
-// Removed in this commit.
-public class BinlogPipeline implements Closeable {
-    // Reader over the binlog file; positioned independently of the appender below.
-    private final RandomAccessFile raf;
-    // Append-only writer over the same file.
-    private final OutputStream out;
-    // Reusable 1-byte record-length header.
-    private final byte[] h = new byte[1];
-    // Payload of the most recent read (shared field, so readLine() is not reentrant).
-    private byte[] b;
-    private File file;
-    // File offset of the record most recently returned by readLine().
-    private long offset;
-
-    public BinlogPipeline(File file, long pos) throws IOException {
-        this.file = file;
-        this.raf = new BufferedRandomAccessFile(file, "r");
-        this.out = new FileOutputStream(file, true);
-        // Resume reading from the previously persisted position.
-        raf.seek(pos);
-    }
-
-    // Reads the next length-prefixed record, or returns null at end of file.
-    // NOTE(review): the return values of raf.read(...) are ignored, so a short
-    // read would silently yield a truncated/zero-padded record.
-    // NOTE(review): write() below uses writeDelimitedTo (varint length) while
-    // this reads a single length byte — consistent only while every record stays
-    // under 128 bytes; confirm against the writer's message sizes.
-    public byte[] readLine() throws IOException {
-        this.offset = raf.getFilePointer();
-        if (offset >= raf.length()) {
-            return null;
-        }
-        raf.read(h);
-        b = new byte[Byte.toUnsignedInt(h[0])];
-        raf.read(b);
-        raf.seek(this.offset + (h.length + b.length));
-        return b;
-    }
-
-    // Appends one message (length prefix + payload); null messages are ignored.
-    public void write(BinlogMessage message) throws IOException {
-        if(null != message){
-            message.writeDelimitedTo(out);
-        }
-    }
-
-    public long getOffset() {
-        return offset;
-    }
-
-    public String getBinlogName() {
-        return file.getName();
-    }
-
-    @Override
-    public void close() {
-        IOUtils.closeQuietly(out);
-        IOUtils.closeQuietly(raf);
-    }
-}

+ 32 - 0
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/BinlogRecorder.java

@@ -3,6 +3,8 @@ package org.dbsyncer.storage.binlog;
 import org.dbsyncer.storage.binlog.proto.BinlogMessage;
 
 import java.io.IOException;
+import java.util.List;
+import java.util.Queue;
 
 /**
  * @author AE86
@@ -11,6 +13,15 @@ import java.io.IOException;
  */
 public interface BinlogRecorder {
 
+    /**
+     * 获取任务名称
+     *
+     * @return
+     */
+    default String getTaskName() {
+        return getClass().getSimpleName();
+    }
+
     /**
      * 将任务序列化刷入磁盘
      *
@@ -18,4 +29,25 @@ public interface BinlogRecorder {
      */
     void flush(BinlogMessage message) throws IOException;
 
+    /**
+     * 消息同步完成后,删除消息记录
+     *
+     * @param messageIds
+     */
+    void complete(List<String> messageIds);
+
+    /**
+     * 获取缓存队列
+     *
+     * @return
+     */
+    Queue getQueue();
+
+    /**
+     * 获取缓存队列容量
+     *
+     * @return
+     */
+    int getQueueCapacity();
+
 }

+ 782 - 0
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/BinlogMap.java

@@ -0,0 +1,782 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: BinlogMessageProto.proto
+
+package org.dbsyncer.storage.binlog.proto;
+
+/**
+ * Protobuf type {@code BinlogMap}
+ */
+public final class BinlogMap extends
+        com.google.protobuf.GeneratedMessageV3 implements
+        // @@protoc_insertion_point(message_implements:BinlogMap)
+        BinlogMapOrBuilder {
+  private static final long serialVersionUID = 0L;
+
+  // Use BinlogMap.newBuilder() to construct.
+  private BinlogMap(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+    super(builder);
+  }
+
+  private BinlogMap() {
+  }
+
+  @Override
+  @SuppressWarnings({"unused"})
+  protected Object newInstance(
+          UnusedPrivateParameter unused) {
+    return new BinlogMap();
+  }
+
+  @Override
+  public final com.google.protobuf.UnknownFieldSet
+  getUnknownFields() {
+    return this.unknownFields;
+  }
+
+  private BinlogMap(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+    this();
+    if (extensionRegistry == null) {
+      throw new NullPointerException();
+    }
+    int mutable_bitField0_ = 0;
+    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+            com.google.protobuf.UnknownFieldSet.newBuilder();
+    try {
+      boolean done = false;
+      while (!done) {
+        int tag = input.readTag();
+        switch (tag) {
+          case 0:
+            done = true;
+            break;
+          case 10: {
+            if (!((mutable_bitField0_ & 0x00000001) != 0)) {
+              row_ = com.google.protobuf.MapField.newMapField(
+                      RowDefaultEntryHolder.defaultEntry);
+              mutable_bitField0_ |= 0x00000001;
+            }
+            com.google.protobuf.MapEntry<String, com.google.protobuf.ByteString>
+                    row__ = input.readMessage(
+                    RowDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry);
+            row_.getMutableMap().put(
+                    row__.getKey(), row__.getValue());
+            break;
+          }
+          default: {
+            if (!parseUnknownField(
+                    input, unknownFields, extensionRegistry, tag)) {
+              done = true;
+            }
+            break;
+          }
+        }
+      }
+    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+      throw e.setUnfinishedMessage(this);
+    } catch (com.google.protobuf.UninitializedMessageException e) {
+      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
+    } catch (java.io.IOException e) {
+      throw new com.google.protobuf.InvalidProtocolBufferException(
+              e).setUnfinishedMessage(this);
+    } finally {
+      this.unknownFields = unknownFields.build();
+      makeExtensionsImmutable();
+    }
+  }
+
+  public static final com.google.protobuf.Descriptors.Descriptor
+  getDescriptor() {
+    return BinlogMessageProto.internal_static_BinlogMap_descriptor;
+  }
+
+  @SuppressWarnings({"rawtypes"})
+  @Override
+  protected com.google.protobuf.MapField internalGetMapField(
+          int number) {
+    switch (number) {
+      case 1:
+        return internalGetRow();
+      default:
+        throw new RuntimeException(
+                "Invalid map field number: " + number);
+    }
+  }
+
+  @Override
+  protected FieldAccessorTable
+  internalGetFieldAccessorTable() {
+    return BinlogMessageProto.internal_static_BinlogMap_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                    BinlogMap.class, Builder.class);
+  }
+
+  public static final int ROW_FIELD_NUMBER = 1;
+
+  private static final class RowDefaultEntryHolder {
+    static final com.google.protobuf.MapEntry<
+            String, com.google.protobuf.ByteString> defaultEntry =
+            com.google.protobuf.MapEntry
+                    .<String, com.google.protobuf.ByteString>newDefaultInstance(
+                            BinlogMessageProto.internal_static_BinlogMap_RowEntry_descriptor,
+                            com.google.protobuf.WireFormat.FieldType.STRING,
+                            "",
+                            com.google.protobuf.WireFormat.FieldType.BYTES,
+                            com.google.protobuf.ByteString.EMPTY);
+  }
+
+  private com.google.protobuf.MapField<
+          String, com.google.protobuf.ByteString> row_;
+
+  private com.google.protobuf.MapField<String, com.google.protobuf.ByteString>
+  internalGetRow() {
+    if (row_ == null) {
+      return com.google.protobuf.MapField.emptyMapField(
+              RowDefaultEntryHolder.defaultEntry);
+    }
+    return row_;
+  }
+
+  public int getRowCount() {
+    return internalGetRow().getMap().size();
+  }
+
+  /**
+   * <code>map&lt;string, bytes&gt; row = 1;</code>
+   */
+
+  @Override
+  public boolean containsRow(
+          String key) {
+    if (key == null) {
+      throw new NullPointerException("map key");
+    }
+    return internalGetRow().getMap().containsKey(key);
+  }
+
+  /**
+   * Use {@link #getRowMap()} instead.
+   */
+  @Override
+  @Deprecated
+  public java.util.Map<String, com.google.protobuf.ByteString> getRow() {
+    return getRowMap();
+  }
+
+  /**
+   * <code>map&lt;string, bytes&gt; row = 1;</code>
+   */
+  @Override
+
+  public java.util.Map<String, com.google.protobuf.ByteString> getRowMap() {
+    return internalGetRow().getMap();
+  }
+
+  /**
+   * <code>map&lt;string, bytes&gt; row = 1;</code>
+   */
+  @Override
+
+  public com.google.protobuf.ByteString getRowOrDefault(
+          String key,
+          com.google.protobuf.ByteString defaultValue) {
+    if (key == null) {
+      throw new NullPointerException("map key");
+    }
+    java.util.Map<String, com.google.protobuf.ByteString> map =
+            internalGetRow().getMap();
+    return map.containsKey(key) ? map.get(key) : defaultValue;
+  }
+
+  /**
+   * <code>map&lt;string, bytes&gt; row = 1;</code>
+   */
+  @Override
+
+  public com.google.protobuf.ByteString getRowOrThrow(
+          String key) {
+    if (key == null) {
+      throw new NullPointerException("map key");
+    }
+    java.util.Map<String, com.google.protobuf.ByteString> map =
+            internalGetRow().getMap();
+    if (!map.containsKey(key)) {
+      throw new IllegalArgumentException();
+    }
+    return map.get(key);
+  }
+
+  private byte memoizedIsInitialized = -1;
+
+  @Override
+  public final boolean isInitialized() {
+    byte isInitialized = memoizedIsInitialized;
+    if (isInitialized == 1) return true;
+    if (isInitialized == 0) return false;
+
+    memoizedIsInitialized = 1;
+    return true;
+  }
+
+  @Override
+  public void writeTo(com.google.protobuf.CodedOutputStream output)
+          throws java.io.IOException {
+    com.google.protobuf.GeneratedMessageV3
+            .serializeStringMapTo(
+                    output,
+                    internalGetRow(),
+                    RowDefaultEntryHolder.defaultEntry,
+                    1);
+    unknownFields.writeTo(output);
+  }
+
+  @Override
+  public int getSerializedSize() {
+    int size = memoizedSize;
+    if (size != -1) return size;
+
+    size = 0;
+    for (java.util.Map.Entry<String, com.google.protobuf.ByteString> entry
+            : internalGetRow().getMap().entrySet()) {
+      com.google.protobuf.MapEntry<String, com.google.protobuf.ByteString>
+              row__ = RowDefaultEntryHolder.defaultEntry.newBuilderForType()
+              .setKey(entry.getKey())
+              .setValue(entry.getValue())
+              .build();
+      size += com.google.protobuf.CodedOutputStream
+              .computeMessageSize(1, row__);
+    }
+    size += unknownFields.getSerializedSize();
+    memoizedSize = size;
+    return size;
+  }
+
+  @Override
+  public boolean equals(final Object obj) {
+    if (obj == this) {
+      return true;
+    }
+    if (!(obj instanceof BinlogMap)) {
+      return super.equals(obj);
+    }
+    BinlogMap other = (BinlogMap) obj;
+
+    if (!internalGetRow().equals(
+            other.internalGetRow())) return false;
+    if (!unknownFields.equals(other.unknownFields)) return false;
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    if (memoizedHashCode != 0) {
+      return memoizedHashCode;
+    }
+    int hash = 41;
+    hash = (19 * hash) + getDescriptor().hashCode();
+    if (!internalGetRow().getMap().isEmpty()) {
+      hash = (37 * hash) + ROW_FIELD_NUMBER;
+      hash = (53 * hash) + internalGetRow().hashCode();
+    }
+    hash = (29 * hash) + unknownFields.hashCode();
+    memoizedHashCode = hash;
+    return hash;
+  }
+
+  public static BinlogMap parseFrom(
+          java.nio.ByteBuffer data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+
+  public static BinlogMap parseFrom(
+          java.nio.ByteBuffer data,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+
+  public static BinlogMap parseFrom(
+          com.google.protobuf.ByteString data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+
+  public static BinlogMap parseFrom(
+          com.google.protobuf.ByteString data,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+
+  public static BinlogMap parseFrom(byte[] data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+
+  public static BinlogMap parseFrom(
+          byte[] data,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+
+  public static BinlogMap parseFrom(java.io.InputStream input)
+          throws java.io.IOException {
+    return com.google.protobuf.GeneratedMessageV3
+            .parseWithIOException(PARSER, input);
+  }
+
+  public static BinlogMap parseFrom(
+          java.io.InputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+    return com.google.protobuf.GeneratedMessageV3
+            .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+
+  public static BinlogMap parseDelimitedFrom(java.io.InputStream input)
+          throws java.io.IOException {
+    return com.google.protobuf.GeneratedMessageV3
+            .parseDelimitedWithIOException(PARSER, input);
+  }
+
+  public static BinlogMap parseDelimitedFrom(
+          java.io.InputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+    return com.google.protobuf.GeneratedMessageV3
+            .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+  }
+
+  public static BinlogMap parseFrom(
+          com.google.protobuf.CodedInputStream input)
+          throws java.io.IOException {
+    return com.google.protobuf.GeneratedMessageV3
+            .parseWithIOException(PARSER, input);
+  }
+
+  public static BinlogMap parseFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+    return com.google.protobuf.GeneratedMessageV3
+            .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+
+  @Override
+  public Builder newBuilderForType() {
+    return newBuilder();
+  }
+
+  public static Builder newBuilder() {
+    return DEFAULT_INSTANCE.toBuilder();
+  }
+
+  public static Builder newBuilder(BinlogMap prototype) {
+    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+  }
+
+  @Override
+  public Builder toBuilder() {
+    return this == DEFAULT_INSTANCE
+            ? new Builder() : new Builder().mergeFrom(this);
+  }
+
+  @Override
+  protected Builder newBuilderForType(
+          BuilderParent parent) {
+    Builder builder = new Builder(parent);
+    return builder;
+  }
+
+  /**
+   * Protobuf type {@code BinlogMap}
+   */
+  public static final class Builder extends
+          com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+          // @@protoc_insertion_point(builder_implements:BinlogMap)
+          BinlogMapOrBuilder {
+    public static final com.google.protobuf.Descriptors.Descriptor
+    getDescriptor() {
+      return BinlogMessageProto.internal_static_BinlogMap_descriptor;
+    }
+
+    @SuppressWarnings({"rawtypes"})
+    protected com.google.protobuf.MapField internalGetMapField(
+            int number) {
+      switch (number) {
+        case 1:
+          return internalGetRow();
+        default:
+          throw new RuntimeException(
+                  "Invalid map field number: " + number);
+      }
+    }
+
+    @SuppressWarnings({"rawtypes"})
+    protected com.google.protobuf.MapField internalGetMutableMapField(
+            int number) {
+      switch (number) {
+        case 1:
+          return internalGetMutableRow();
+        default:
+          throw new RuntimeException(
+                  "Invalid map field number: " + number);
+      }
+    }
+
+    @Override
+    protected FieldAccessorTable
+    internalGetFieldAccessorTable() {
+      return BinlogMessageProto.internal_static_BinlogMap_fieldAccessorTable
+              .ensureFieldAccessorsInitialized(
+                      BinlogMap.class, Builder.class);
+    }
+
+    // Construct using org.dbsyncer.storage.binlog.proto.BinlogMap.newBuilder()
+    private Builder() {
+      maybeForceBuilderInitialization();
+    }
+
+    private Builder(
+            BuilderParent parent) {
+      super(parent);
+      maybeForceBuilderInitialization();
+    }
+
+    private void maybeForceBuilderInitialization() {
+      if (com.google.protobuf.GeneratedMessageV3
+              .alwaysUseFieldBuilders) {
+      }
+    }
+
+    @Override
+    public Builder clear() {
+      super.clear();
+      internalGetMutableRow().clear();
+      return this;
+    }
+
+    @Override
+    public com.google.protobuf.Descriptors.Descriptor
+    getDescriptorForType() {
+      return BinlogMessageProto.internal_static_BinlogMap_descriptor;
+    }
+
+    @Override
+    public BinlogMap getDefaultInstanceForType() {
+      return BinlogMap.getDefaultInstance();
+    }
+
+    @Override
+    public BinlogMap build() {
+      BinlogMap result = buildPartial();
+      if (!result.isInitialized()) {
+        throw newUninitializedMessageException(result);
+      }
+      return result;
+    }
+
+    @Override
+    public BinlogMap buildPartial() {
+      BinlogMap result = new BinlogMap(this);
+      int from_bitField0_ = bitField0_;
+      result.row_ = internalGetRow();
+      result.row_.makeImmutable();
+      onBuilt();
+      return result;
+    }
+
+    @Override
+    public Builder clone() {
+      return super.clone();
+    }
+
+    @Override
+    public Builder setField(
+            com.google.protobuf.Descriptors.FieldDescriptor field,
+            Object value) {
+      return super.setField(field, value);
+    }
+
+    @Override
+    public Builder clearField(
+            com.google.protobuf.Descriptors.FieldDescriptor field) {
+      return super.clearField(field);
+    }
+
+    @Override
+    public Builder clearOneof(
+            com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+      return super.clearOneof(oneof);
+    }
+
+    @Override
+    public Builder setRepeatedField(
+            com.google.protobuf.Descriptors.FieldDescriptor field,
+            int index, Object value) {
+      return super.setRepeatedField(field, index, value);
+    }
+
+    @Override
+    public Builder addRepeatedField(
+            com.google.protobuf.Descriptors.FieldDescriptor field,
+            Object value) {
+      return super.addRepeatedField(field, value);
+    }
+
+    @Override
+    public Builder mergeFrom(com.google.protobuf.Message other) {
+      if (other instanceof BinlogMap) {
+        return mergeFrom((BinlogMap) other);
+      } else {
+        super.mergeFrom(other);
+        return this;
+      }
+    }
+
+    public Builder mergeFrom(BinlogMap other) {
+      if (other == BinlogMap.getDefaultInstance()) return this;
+      internalGetMutableRow().mergeFrom(
+              other.internalGetRow());
+      this.mergeUnknownFields(other.unknownFields);
+      onChanged();
+      return this;
+    }
+
+    @Override
+    public final boolean isInitialized() {
+      return true;
+    }
+
+    @Override
+    public Builder mergeFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws java.io.IOException {
+      BinlogMap parsedMessage = null;
+      try {
+        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        parsedMessage = (BinlogMap) e.getUnfinishedMessage();
+        throw e.unwrapIOException();
+      } finally {
+        if (parsedMessage != null) {
+          mergeFrom(parsedMessage);
+        }
+      }
+      return this;
+    }
+
+    private int bitField0_;
+
+    private com.google.protobuf.MapField<
+            String, com.google.protobuf.ByteString> row_;
+
+    private com.google.protobuf.MapField<String, com.google.protobuf.ByteString>
+    internalGetRow() {
+      if (row_ == null) {
+        return com.google.protobuf.MapField.emptyMapField(
+                RowDefaultEntryHolder.defaultEntry);
+      }
+      return row_;
+    }
+
+    private com.google.protobuf.MapField<String, com.google.protobuf.ByteString>
+    internalGetMutableRow() {
+      onChanged();
+      ;
+      if (row_ == null) {
+        row_ = com.google.protobuf.MapField.newMapField(
+                RowDefaultEntryHolder.defaultEntry);
+      }
+      if (!row_.isMutable()) {
+        row_ = row_.copy();
+      }
+      return row_;
+    }
+
+    public int getRowCount() {
+      return internalGetRow().getMap().size();
+    }
+
+    /**
+     * <code>map&lt;string, bytes&gt; row = 1;</code>
+     */
+
+    @Override
+    public boolean containsRow(
+            String key) {
+      if (key == null) {
+        throw new NullPointerException("map key");
+      }
+      return internalGetRow().getMap().containsKey(key);
+    }
+
+    /**
+     * Use {@link #getRowMap()} instead.
+     */
+    @Override
+    @Deprecated
+    public java.util.Map<String, com.google.protobuf.ByteString> getRow() {
+      return getRowMap();
+    }
+
+    /**
+     * <code>map&lt;string, bytes&gt; row = 1;</code>
+     */
+    @Override
+
+    public java.util.Map<String, com.google.protobuf.ByteString> getRowMap() {
+      return internalGetRow().getMap();
+    }
+
+    /**
+     * <code>map&lt;string, bytes&gt; row = 1;</code>
+     */
+    @Override
+
+    public com.google.protobuf.ByteString getRowOrDefault(
+            String key,
+            com.google.protobuf.ByteString defaultValue) {
+      if (key == null) {
+        throw new NullPointerException("map key");
+      }
+      java.util.Map<String, com.google.protobuf.ByteString> map =
+              internalGetRow().getMap();
+      return map.containsKey(key) ? map.get(key) : defaultValue;
+    }
+
+    /**
+     * <code>map&lt;string, bytes&gt; row = 1;</code>
+     */
+    @Override
+
+    public com.google.protobuf.ByteString getRowOrThrow(
+            String key) {
+      if (key == null) {
+        throw new NullPointerException("map key");
+      }
+      java.util.Map<String, com.google.protobuf.ByteString> map =
+              internalGetRow().getMap();
+      if (!map.containsKey(key)) {
+        throw new IllegalArgumentException();
+      }
+      return map.get(key);
+    }
+
+    public Builder clearRow() {
+      internalGetMutableRow().getMutableMap()
+              .clear();
+      return this;
+    }
+
+    /**
+     * <code>map&lt;string, bytes&gt; row = 1;</code>
+     */
+
+    public Builder removeRow(
+            String key) {
+      if (key == null) {
+        throw new NullPointerException("map key");
+      }
+      internalGetMutableRow().getMutableMap()
+              .remove(key);
+      return this;
+    }
+
+    /**
+     * Use alternate mutation accessors instead.
+     */
+    @Deprecated
+    public java.util.Map<String, com.google.protobuf.ByteString>
+    getMutableRow() {
+      return internalGetMutableRow().getMutableMap();
+    }
+
+    /**
+     * <code>map&lt;string, bytes&gt; row = 1;</code>
+     */
+    public Builder putRow(
+            String key,
+            com.google.protobuf.ByteString value) {
+      if (key == null) {
+        throw new NullPointerException("map key");
+      }
+      if (value == null) {
+        throw new NullPointerException("map value");
+      }
+
+      internalGetMutableRow().getMutableMap()
+              .put(key, value);
+      return this;
+    }
+
+    /**
+     * <code>map&lt;string, bytes&gt; row = 1;</code>
+     */
+
+    public Builder putAllRow(
+            java.util.Map<String, com.google.protobuf.ByteString> values) {
+      internalGetMutableRow().getMutableMap()
+              .putAll(values);
+      return this;
+    }
+
+    @Override
+    public final Builder setUnknownFields(
+            final com.google.protobuf.UnknownFieldSet unknownFields) {
+      return super.setUnknownFields(unknownFields);
+    }
+
+    @Override
+    public final Builder mergeUnknownFields(
+            final com.google.protobuf.UnknownFieldSet unknownFields) {
+      return super.mergeUnknownFields(unknownFields);
+    }
+
+
+    // @@protoc_insertion_point(builder_scope:BinlogMap)
+  }
+
+  // @@protoc_insertion_point(class_scope:BinlogMap)
+  private static final BinlogMap DEFAULT_INSTANCE;
+
+  static {
+    DEFAULT_INSTANCE = new BinlogMap();
+  }
+
+  public static BinlogMap getDefaultInstance() {
+    return DEFAULT_INSTANCE;
+  }
+
+  private static final com.google.protobuf.Parser<BinlogMap>
+          PARSER = new com.google.protobuf.AbstractParser<BinlogMap>() {
+    @Override
+    public BinlogMap parsePartialFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+      return new BinlogMap(input, extensionRegistry);
+    }
+  };
+
+  public static com.google.protobuf.Parser<BinlogMap> parser() {
+    return PARSER;
+  }
+
+  @Override
+  public com.google.protobuf.Parser<BinlogMap> getParserForType() {
+    return PARSER;
+  }
+
+  @Override
+  public BinlogMap getDefaultInstanceForType() {
+    return DEFAULT_INSTANCE;
+  }
+
+}
+

+ 50 - 0
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/BinlogMapOrBuilder.java

@@ -0,0 +1,50 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: BinlogMessageProto.proto
+
+package org.dbsyncer.storage.binlog.proto;
+
+public interface BinlogMapOrBuilder extends
+        // @@protoc_insertion_point(interface_extends:BinlogMap)
+        com.google.protobuf.MessageOrBuilder {
+
+  /**
+   * <code>map&lt;string, bytes&gt; row = 1;</code>
+   */
+  int getRowCount();
+
+  /**
+   * <code>map&lt;string, bytes&gt; row = 1;</code>
+   */
+  boolean containsRow(
+          String key);
+
+  /**
+   * Use {@link #getRowMap()} instead.
+   */
+  @Deprecated
+  java.util.Map<String, com.google.protobuf.ByteString>
+  getRow();
+
+  /**
+   * <code>map&lt;string, bytes&gt; row = 1;</code>
+   */
+  java.util.Map<String, com.google.protobuf.ByteString>
+  getRowMap();
+
+  /**
+   * <code>map&lt;string, bytes&gt; row = 1;</code>
+   */
+
+  /* nullable */
+  com.google.protobuf.ByteString getRowOrDefault(
+          String key,
+          /* nullable */
+          com.google.protobuf.ByteString defaultValue);
+
+  /**
+   * <code>map&lt;string, bytes&gt; row = 1;</code>
+   */
+
+  com.google.protobuf.ByteString getRowOrThrow(
+          String key);
+}

File diff suppressed because it is too large
+ 577 - 748
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/BinlogMessage.java


+ 47 - 56
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/BinlogMessageOrBuilder.java

@@ -7,60 +7,51 @@ public interface BinlogMessageOrBuilder extends
         // @@protoc_insertion_point(interface_extends:BinlogMessage)
         com.google.protobuf.MessageOrBuilder {
 
-    /**
-     * <code>string table_group_id = 1;</code>
-     *
-     * @return The tableGroupId.
-     */
-    java.lang.String getTableGroupId();
-
-    /**
-     * <code>string table_group_id = 1;</code>
-     *
-     * @return The bytes for tableGroupId.
-     */
-    com.google.protobuf.ByteString
-    getTableGroupIdBytes();
-
-    /**
-     * <code>.EventEnum event = 2;</code>
-     *
-     * @return The enum numeric value on the wire for event.
-     */
-    int getEventValue();
-
-    /**
-     * <code>.EventEnum event = 2;</code>
-     *
-     * @return The event.
-     */
-    org.dbsyncer.storage.binlog.proto.EventEnum getEvent();
-
-    /**
-     * <code>repeated .Data data = 3;</code>
-     */
-    java.util.List<org.dbsyncer.storage.binlog.proto.Data>
-    getDataList();
-
-    /**
-     * <code>repeated .Data data = 3;</code>
-     */
-    org.dbsyncer.storage.binlog.proto.Data getData(int index);
-
-    /**
-     * <code>repeated .Data data = 3;</code>
-     */
-    int getDataCount();
-
-    /**
-     * <code>repeated .Data data = 3;</code>
-     */
-    java.util.List<? extends org.dbsyncer.storage.binlog.proto.DataOrBuilder>
-    getDataOrBuilderList();
-
-    /**
-     * <code>repeated .Data data = 3;</code>
-     */
-    org.dbsyncer.storage.binlog.proto.DataOrBuilder getDataOrBuilder(
-            int index);
+  /**
+   * <code>string table_group_id = 1;</code>
+   *
+   * @return The tableGroupId.
+   */
+  String getTableGroupId();
+
+  /**
+   * <code>string table_group_id = 1;</code>
+   *
+   * @return The bytes for tableGroupId.
+   */
+  com.google.protobuf.ByteString
+  getTableGroupIdBytes();
+
+  /**
+   * <code>.EventEnum event = 2;</code>
+   *
+   * @return The enum numeric value on the wire for event.
+   */
+  int getEventValue();
+
+  /**
+   * <code>.EventEnum event = 2;</code>
+   *
+   * @return The event.
+   */
+  EventEnum getEvent();
+
+  /**
+   * <code>.BinlogMap data = 3;</code>
+   *
+   * @return Whether the data field is set.
+   */
+  boolean hasData();
+
+  /**
+   * <code>.BinlogMap data = 3;</code>
+   *
+   * @return The data.
+   */
+  BinlogMap getData();
+
+  /**
+   * <code>.BinlogMap data = 3;</code>
+   */
+  BinlogMapOrBuilder getDataOrBuilder();
 }

+ 67 - 66
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/BinlogMessageProto.java

@@ -4,77 +4,78 @@
 package org.dbsyncer.storage.binlog.proto;
 
 public final class BinlogMessageProto {
-    private BinlogMessageProto() {
-    }
+  private BinlogMessageProto() {
+  }
 
-    public static void registerAllExtensions(
-            com.google.protobuf.ExtensionRegistryLite registry) {
-    }
+  public static void registerAllExtensions(
+          com.google.protobuf.ExtensionRegistryLite registry) {
+  }
 
-    public static void registerAllExtensions(
-            com.google.protobuf.ExtensionRegistry registry) {
-        registerAllExtensions(
-                (com.google.protobuf.ExtensionRegistryLite) registry);
-    }
+  public static void registerAllExtensions(
+          com.google.protobuf.ExtensionRegistry registry) {
+    registerAllExtensions(
+            (com.google.protobuf.ExtensionRegistryLite) registry);
+  }
 
-    static final com.google.protobuf.Descriptors.Descriptor
-            internal_static_BinlogMessage_descriptor;
-    static final
-    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-            internal_static_BinlogMessage_fieldAccessorTable;
-    static final com.google.protobuf.Descriptors.Descriptor
-            internal_static_Data_descriptor;
-    static final
-    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-            internal_static_Data_fieldAccessorTable;
-    static final com.google.protobuf.Descriptors.Descriptor
-            internal_static_Data_RowEntry_descriptor;
-    static final
-    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-            internal_static_Data_RowEntry_fieldAccessorTable;
+  static final com.google.protobuf.Descriptors.Descriptor
+          internal_static_BinlogMessage_descriptor;
+  static final
+  com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internal_static_BinlogMessage_fieldAccessorTable;
+  static final com.google.protobuf.Descriptors.Descriptor
+          internal_static_BinlogMap_descriptor;
+  static final
+  com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internal_static_BinlogMap_fieldAccessorTable;
+  static final com.google.protobuf.Descriptors.Descriptor
+          internal_static_BinlogMap_RowEntry_descriptor;
+  static final
+  com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internal_static_BinlogMap_RowEntry_fieldAccessorTable;
 
-    public static com.google.protobuf.Descriptors.FileDescriptor
-    getDescriptor() {
-        return descriptor;
-    }
+  public static com.google.protobuf.Descriptors.FileDescriptor
+  getDescriptor() {
+    return descriptor;
+  }
 
-    private static com.google.protobuf.Descriptors.FileDescriptor
-            descriptor;
+  private static com.google.protobuf.Descriptors.FileDescriptor
+          descriptor;
 
-    static {
-        java.lang.String[] descriptorData = {
-                "\n\030BinlogMessageProto.proto\"W\n\rBinlogMess" +
-                        "age\022\026\n\016table_group_id\030\001 \001(\t\022\031\n\005event\030\002 \001" +
-                        "(\0162\n.EventEnum\022\023\n\004data\030\003 \003(\0132\005.Data\"O\n\004D" +
-                        "ata\022\033\n\003row\030\001 \003(\0132\016.Data.RowEntry\032*\n\010RowE" +
-                        "ntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\014:\0028\001*/\n\t" +
-                        "EventEnum\022\n\n\006UPDATE\020\000\022\n\n\006INSERT\020\001\022\n\n\006DEL" +
-                        "ETE\020\002B;\n!org.dbsyncer.storage.binlog.pro" +
-                        "toB\022BinlogMessageProtoH\001P\001b\006proto3"
-        };
-        descriptor = com.google.protobuf.Descriptors.FileDescriptor
-                .internalBuildGeneratedFileFrom(descriptorData,
-                        new com.google.protobuf.Descriptors.FileDescriptor[]{
-                        });
-        internal_static_BinlogMessage_descriptor =
-                getDescriptor().getMessageTypes().get(0);
-        internal_static_BinlogMessage_fieldAccessorTable = new
-                com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
-                internal_static_BinlogMessage_descriptor,
-                new java.lang.String[]{"TableGroupId", "Event", "Data",});
-        internal_static_Data_descriptor =
-                getDescriptor().getMessageTypes().get(1);
-        internal_static_Data_fieldAccessorTable = new
-                com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
-                internal_static_Data_descriptor,
-                new java.lang.String[]{"Row",});
-        internal_static_Data_RowEntry_descriptor =
-                internal_static_Data_descriptor.getNestedTypes().get(0);
-        internal_static_Data_RowEntry_fieldAccessorTable = new
-                com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
-                internal_static_Data_RowEntry_descriptor,
-                new java.lang.String[]{"Key", "Value",});
-    }
+  static {
+    String[] descriptorData = {
+            "\n\030BinlogMessageProto.proto\"\\\n\rBinlogMess" +
+                    "age\022\026\n\016table_group_id\030\001 \001(\t\022\031\n\005event\030\002 \001" +
+                    "(\0162\n.EventEnum\022\030\n\004data\030\003 \001(\0132\n.BinlogMap" +
+                    "\"Y\n\tBinlogMap\022 \n\003row\030\001 \003(\0132\023.BinlogMap.R" +
+                    "owEntry\032*\n\010RowEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005valu" +
+                    "e\030\002 \001(\014:\0028\001*/\n\tEventEnum\022\n\n\006UPDATE\020\000\022\n\n\006" +
+                    "INSERT\020\001\022\n\n\006DELETE\020\002B;\n!org.dbsyncer.sto" +
+                    "rage.binlog.protoB\022BinlogMessageProtoH\001P" +
+                    "\001b\006proto3"
+    };
+    descriptor = com.google.protobuf.Descriptors.FileDescriptor
+            .internalBuildGeneratedFileFrom(descriptorData,
+                    new com.google.protobuf.Descriptors.FileDescriptor[]{
+                    });
+    internal_static_BinlogMessage_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+    internal_static_BinlogMessage_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+            internal_static_BinlogMessage_descriptor,
+            new String[]{"TableGroupId", "Event", "Data",});
+    internal_static_BinlogMap_descriptor =
+            getDescriptor().getMessageTypes().get(1);
+    internal_static_BinlogMap_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+            internal_static_BinlogMap_descriptor,
+            new String[]{"Row",});
+    internal_static_BinlogMap_RowEntry_descriptor =
+            internal_static_BinlogMap_descriptor.getNestedTypes().get(0);
+    internal_static_BinlogMap_RowEntry_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+            internal_static_BinlogMap_RowEntry_descriptor,
+            new String[]{"Key", "Value",});
+  }
 
-    // @@protoc_insertion_point(outer_class_scope)
+  // @@protoc_insertion_point(outer_class_scope)
 }

+ 0 - 782
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/Data.java

@@ -1,782 +0,0 @@
-// Generated by the protocol buffer compiler.  DO NOT EDIT!
-// source: BinlogMessageProto.proto
-
-package org.dbsyncer.storage.binlog.proto;
-
-/**
- * Protobuf type {@code Data}
- */
-public final class Data extends
-        com.google.protobuf.GeneratedMessageV3 implements
-        // @@protoc_insertion_point(message_implements:Data)
-        DataOrBuilder {
-    private static final long serialVersionUID = 0L;
-
-    // Use Data.newBuilder() to construct.
-    private Data(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
-        super(builder);
-    }
-
-    private Data() {
-    }
-
-    @java.lang.Override
-    @SuppressWarnings({"unused"})
-    protected java.lang.Object newInstance(
-            UnusedPrivateParameter unused) {
-        return new Data();
-    }
-
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-    getUnknownFields() {
-        return this.unknownFields;
-    }
-
-    private Data(
-            com.google.protobuf.CodedInputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws com.google.protobuf.InvalidProtocolBufferException {
-        this();
-        if (extensionRegistry == null) {
-            throw new java.lang.NullPointerException();
-        }
-        int mutable_bitField0_ = 0;
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-                com.google.protobuf.UnknownFieldSet.newBuilder();
-        try {
-            boolean done = false;
-            while (!done) {
-                int tag = input.readTag();
-                switch (tag) {
-                    case 0:
-                        done = true;
-                        break;
-                    case 10: {
-                        if (!((mutable_bitField0_ & 0x00000001) != 0)) {
-                            row_ = com.google.protobuf.MapField.newMapField(
-                                    RowDefaultEntryHolder.defaultEntry);
-                            mutable_bitField0_ |= 0x00000001;
-                        }
-                        com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.ByteString>
-                                row__ = input.readMessage(
-                                RowDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry);
-                        row_.getMutableMap().put(
-                                row__.getKey(), row__.getValue());
-                        break;
-                    }
-                    default: {
-                        if (!parseUnknownField(
-                                input, unknownFields, extensionRegistry, tag)) {
-                            done = true;
-                        }
-                        break;
-                    }
-                }
-            }
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-            throw e.setUnfinishedMessage(this);
-        } catch (com.google.protobuf.UninitializedMessageException e) {
-            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
-        } catch (java.io.IOException e) {
-            throw new com.google.protobuf.InvalidProtocolBufferException(
-                    e).setUnfinishedMessage(this);
-        } finally {
-            this.unknownFields = unknownFields.build();
-            makeExtensionsImmutable();
-        }
-    }
-
-    public static final com.google.protobuf.Descriptors.Descriptor
-    getDescriptor() {
-        return org.dbsyncer.storage.binlog.proto.BinlogMessageProto.internal_static_Data_descriptor;
-    }
-
-    @SuppressWarnings({"rawtypes"})
-    @java.lang.Override
-    protected com.google.protobuf.MapField internalGetMapField(
-            int number) {
-        switch (number) {
-            case 1:
-                return internalGetRow();
-            default:
-                throw new RuntimeException(
-                        "Invalid map field number: " + number);
-        }
-    }
-
-    @java.lang.Override
-    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-    internalGetFieldAccessorTable() {
-        return org.dbsyncer.storage.binlog.proto.BinlogMessageProto.internal_static_Data_fieldAccessorTable
-                .ensureFieldAccessorsInitialized(
-                        org.dbsyncer.storage.binlog.proto.Data.class, org.dbsyncer.storage.binlog.proto.Data.Builder.class);
-    }
-
-    public static final int ROW_FIELD_NUMBER = 1;
-
-    private static final class RowDefaultEntryHolder {
-        static final com.google.protobuf.MapEntry<
-                java.lang.String, com.google.protobuf.ByteString> defaultEntry =
-                com.google.protobuf.MapEntry
-                        .<java.lang.String, com.google.protobuf.ByteString>newDefaultInstance(
-                                org.dbsyncer.storage.binlog.proto.BinlogMessageProto.internal_static_Data_RowEntry_descriptor,
-                                com.google.protobuf.WireFormat.FieldType.STRING,
-                                "",
-                                com.google.protobuf.WireFormat.FieldType.BYTES,
-                                com.google.protobuf.ByteString.EMPTY);
-    }
-
-    private com.google.protobuf.MapField<
-            java.lang.String, com.google.protobuf.ByteString> row_;
-
-    private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.ByteString>
-    internalGetRow() {
-        if (row_ == null) {
-            return com.google.protobuf.MapField.emptyMapField(
-                    RowDefaultEntryHolder.defaultEntry);
-        }
-        return row_;
-    }
-
-    public int getRowCount() {
-        return internalGetRow().getMap().size();
-    }
-
-    /**
-     * <code>map&lt;string, bytes&gt; row = 1;</code>
-     */
-
-    @java.lang.Override
-    public boolean containsRow(
-            java.lang.String key) {
-        if (key == null) {
-            throw new NullPointerException("map key");
-        }
-        return internalGetRow().getMap().containsKey(key);
-    }
-
-    /**
-     * Use {@link #getRowMap()} instead.
-     */
-    @java.lang.Override
-    @java.lang.Deprecated
-    public java.util.Map<java.lang.String, com.google.protobuf.ByteString> getRow() {
-        return getRowMap();
-    }
-
-    /**
-     * <code>map&lt;string, bytes&gt; row = 1;</code>
-     */
-    @java.lang.Override
-
-    public java.util.Map<java.lang.String, com.google.protobuf.ByteString> getRowMap() {
-        return internalGetRow().getMap();
-    }
-
-    /**
-     * <code>map&lt;string, bytes&gt; row = 1;</code>
-     */
-    @java.lang.Override
-
-    public com.google.protobuf.ByteString getRowOrDefault(
-            java.lang.String key,
-            com.google.protobuf.ByteString defaultValue) {
-        if (key == null) {
-            throw new NullPointerException("map key");
-        }
-        java.util.Map<java.lang.String, com.google.protobuf.ByteString> map =
-                internalGetRow().getMap();
-        return map.containsKey(key) ? map.get(key) : defaultValue;
-    }
-
-    /**
-     * <code>map&lt;string, bytes&gt; row = 1;</code>
-     */
-    @java.lang.Override
-
-    public com.google.protobuf.ByteString getRowOrThrow(
-            java.lang.String key) {
-        if (key == null) {
-            throw new NullPointerException("map key");
-        }
-        java.util.Map<java.lang.String, com.google.protobuf.ByteString> map =
-                internalGetRow().getMap();
-        if (!map.containsKey(key)) {
-            throw new java.lang.IllegalArgumentException();
-        }
-        return map.get(key);
-    }
-
-    private byte memoizedIsInitialized = -1;
-
-    @java.lang.Override
-    public final boolean isInitialized() {
-        byte isInitialized = memoizedIsInitialized;
-        if (isInitialized == 1) return true;
-        if (isInitialized == 0) return false;
-
-        memoizedIsInitialized = 1;
-        return true;
-    }
-
-    @java.lang.Override
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-            throws java.io.IOException {
-        com.google.protobuf.GeneratedMessageV3
-                .serializeStringMapTo(
-                        output,
-                        internalGetRow(),
-                        RowDefaultEntryHolder.defaultEntry,
-                        1);
-        unknownFields.writeTo(output);
-    }
-
-    @java.lang.Override
-    public int getSerializedSize() {
-        int size = memoizedSize;
-        if (size != -1) return size;
-
-        size = 0;
-        for (java.util.Map.Entry<java.lang.String, com.google.protobuf.ByteString> entry
-                : internalGetRow().getMap().entrySet()) {
-            com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.ByteString>
-                    row__ = RowDefaultEntryHolder.defaultEntry.newBuilderForType()
-                    .setKey(entry.getKey())
-                    .setValue(entry.getValue())
-                    .build();
-            size += com.google.protobuf.CodedOutputStream
-                    .computeMessageSize(1, row__);
-        }
-        size += unknownFields.getSerializedSize();
-        memoizedSize = size;
-        return size;
-    }
-
-    @java.lang.Override
-    public boolean equals(final java.lang.Object obj) {
-        if (obj == this) {
-            return true;
-        }
-        if (!(obj instanceof org.dbsyncer.storage.binlog.proto.Data)) {
-            return super.equals(obj);
-        }
-        org.dbsyncer.storage.binlog.proto.Data other = (org.dbsyncer.storage.binlog.proto.Data) obj;
-
-        if (!internalGetRow().equals(
-                other.internalGetRow())) return false;
-        if (!unknownFields.equals(other.unknownFields)) return false;
-        return true;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-        if (memoizedHashCode != 0) {
-            return memoizedHashCode;
-        }
-        int hash = 41;
-        hash = (19 * hash) + getDescriptor().hashCode();
-        if (!internalGetRow().getMap().isEmpty()) {
-            hash = (37 * hash) + ROW_FIELD_NUMBER;
-            hash = (53 * hash) + internalGetRow().hashCode();
-        }
-        hash = (29 * hash) + unknownFields.hashCode();
-        memoizedHashCode = hash;
-        return hash;
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data parseFrom(
-            java.nio.ByteBuffer data)
-            throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data);
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data parseFrom(
-            java.nio.ByteBuffer data,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data, extensionRegistry);
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data parseFrom(
-            com.google.protobuf.ByteString data)
-            throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data);
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data parseFrom(
-            com.google.protobuf.ByteString data,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data, extensionRegistry);
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data parseFrom(byte[] data)
-            throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data);
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data parseFrom(
-            byte[] data,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws com.google.protobuf.InvalidProtocolBufferException {
-        return PARSER.parseFrom(data, extensionRegistry);
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data parseFrom(java.io.InputStream input)
-            throws java.io.IOException {
-        return com.google.protobuf.GeneratedMessageV3
-                .parseWithIOException(PARSER, input);
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data parseFrom(
-            java.io.InputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws java.io.IOException {
-        return com.google.protobuf.GeneratedMessageV3
-                .parseWithIOException(PARSER, input, extensionRegistry);
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data parseDelimitedFrom(java.io.InputStream input)
-            throws java.io.IOException {
-        return com.google.protobuf.GeneratedMessageV3
-                .parseDelimitedWithIOException(PARSER, input);
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data parseDelimitedFrom(
-            java.io.InputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws java.io.IOException {
-        return com.google.protobuf.GeneratedMessageV3
-                .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data parseFrom(
-            com.google.protobuf.CodedInputStream input)
-            throws java.io.IOException {
-        return com.google.protobuf.GeneratedMessageV3
-                .parseWithIOException(PARSER, input);
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data parseFrom(
-            com.google.protobuf.CodedInputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws java.io.IOException {
-        return com.google.protobuf.GeneratedMessageV3
-                .parseWithIOException(PARSER, input, extensionRegistry);
-    }
-
-    @java.lang.Override
-    public Builder newBuilderForType() {
-        return newBuilder();
-    }
-
-    public static Builder newBuilder() {
-        return DEFAULT_INSTANCE.toBuilder();
-    }
-
-    public static Builder newBuilder(org.dbsyncer.storage.binlog.proto.Data prototype) {
-        return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
-    }
-
-    @java.lang.Override
-    public Builder toBuilder() {
-        return this == DEFAULT_INSTANCE
-                ? new Builder() : new Builder().mergeFrom(this);
-    }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-            com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
-        Builder builder = new Builder(parent);
-        return builder;
-    }
-
-    /**
-     * Protobuf type {@code Data}
-     */
-    public static final class Builder extends
-            com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
-            // @@protoc_insertion_point(builder_implements:Data)
-            org.dbsyncer.storage.binlog.proto.DataOrBuilder {
-        public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-            return org.dbsyncer.storage.binlog.proto.BinlogMessageProto.internal_static_Data_descriptor;
-        }
-
-        @SuppressWarnings({"rawtypes"})
-        protected com.google.protobuf.MapField internalGetMapField(
-                int number) {
-            switch (number) {
-                case 1:
-                    return internalGetRow();
-                default:
-                    throw new RuntimeException(
-                            "Invalid map field number: " + number);
-            }
-        }
-
-        @SuppressWarnings({"rawtypes"})
-        protected com.google.protobuf.MapField internalGetMutableMapField(
-                int number) {
-            switch (number) {
-                case 1:
-                    return internalGetMutableRow();
-                default:
-                    throw new RuntimeException(
-                            "Invalid map field number: " + number);
-            }
-        }
-
-        @java.lang.Override
-        protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-            return org.dbsyncer.storage.binlog.proto.BinlogMessageProto.internal_static_Data_fieldAccessorTable
-                    .ensureFieldAccessorsInitialized(
-                            org.dbsyncer.storage.binlog.proto.Data.class, org.dbsyncer.storage.binlog.proto.Data.Builder.class);
-        }
-
-        // Construct using org.dbsyncer.storage.binlog.proto.Data.newBuilder()
-        private Builder() {
-            maybeForceBuilderInitialization();
-        }
-
-        private Builder(
-                com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
-            super(parent);
-            maybeForceBuilderInitialization();
-        }
-
-        private void maybeForceBuilderInitialization() {
-            if (com.google.protobuf.GeneratedMessageV3
-                    .alwaysUseFieldBuilders) {
-            }
-        }
-
-        @java.lang.Override
-        public Builder clear() {
-            super.clear();
-            internalGetMutableRow().clear();
-            return this;
-        }
-
-        @java.lang.Override
-        public com.google.protobuf.Descriptors.Descriptor
-        getDescriptorForType() {
-            return org.dbsyncer.storage.binlog.proto.BinlogMessageProto.internal_static_Data_descriptor;
-        }
-
-        @java.lang.Override
-        public org.dbsyncer.storage.binlog.proto.Data getDefaultInstanceForType() {
-            return org.dbsyncer.storage.binlog.proto.Data.getDefaultInstance();
-        }
-
-        @java.lang.Override
-        public org.dbsyncer.storage.binlog.proto.Data build() {
-            org.dbsyncer.storage.binlog.proto.Data result = buildPartial();
-            if (!result.isInitialized()) {
-                throw newUninitializedMessageException(result);
-            }
-            return result;
-        }
-
-        @java.lang.Override
-        public org.dbsyncer.storage.binlog.proto.Data buildPartial() {
-            org.dbsyncer.storage.binlog.proto.Data result = new org.dbsyncer.storage.binlog.proto.Data(this);
-            int from_bitField0_ = bitField0_;
-            result.row_ = internalGetRow();
-            result.row_.makeImmutable();
-            onBuilt();
-            return result;
-        }
-
-        @java.lang.Override
-        public Builder clone() {
-            return super.clone();
-        }
-
-        @java.lang.Override
-        public Builder setField(
-                com.google.protobuf.Descriptors.FieldDescriptor field,
-                java.lang.Object value) {
-            return super.setField(field, value);
-        }
-
-        @java.lang.Override
-        public Builder clearField(
-                com.google.protobuf.Descriptors.FieldDescriptor field) {
-            return super.clearField(field);
-        }
-
-        @java.lang.Override
-        public Builder clearOneof(
-                com.google.protobuf.Descriptors.OneofDescriptor oneof) {
-            return super.clearOneof(oneof);
-        }
-
-        @java.lang.Override
-        public Builder setRepeatedField(
-                com.google.protobuf.Descriptors.FieldDescriptor field,
-                int index, java.lang.Object value) {
-            return super.setRepeatedField(field, index, value);
-        }
-
-        @java.lang.Override
-        public Builder addRepeatedField(
-                com.google.protobuf.Descriptors.FieldDescriptor field,
-                java.lang.Object value) {
-            return super.addRepeatedField(field, value);
-        }
-
-        @java.lang.Override
-        public Builder mergeFrom(com.google.protobuf.Message other) {
-            if (other instanceof org.dbsyncer.storage.binlog.proto.Data) {
-                return mergeFrom((org.dbsyncer.storage.binlog.proto.Data) other);
-            } else {
-                super.mergeFrom(other);
-                return this;
-            }
-        }
-
-        public Builder mergeFrom(org.dbsyncer.storage.binlog.proto.Data other) {
-            if (other == org.dbsyncer.storage.binlog.proto.Data.getDefaultInstance()) return this;
-            internalGetMutableRow().mergeFrom(
-                    other.internalGetRow());
-            this.mergeUnknownFields(other.unknownFields);
-            onChanged();
-            return this;
-        }
-
-        @java.lang.Override
-        public final boolean isInitialized() {
-            return true;
-        }
-
-        @java.lang.Override
-        public Builder mergeFrom(
-                com.google.protobuf.CodedInputStream input,
-                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-                throws java.io.IOException {
-            org.dbsyncer.storage.binlog.proto.Data parsedMessage = null;
-            try {
-                parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-                parsedMessage = (org.dbsyncer.storage.binlog.proto.Data) e.getUnfinishedMessage();
-                throw e.unwrapIOException();
-            } finally {
-                if (parsedMessage != null) {
-                    mergeFrom(parsedMessage);
-                }
-            }
-            return this;
-        }
-
-        private int bitField0_;
-
-        private com.google.protobuf.MapField<
-                java.lang.String, com.google.protobuf.ByteString> row_;
-
-        private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.ByteString>
-        internalGetRow() {
-            if (row_ == null) {
-                return com.google.protobuf.MapField.emptyMapField(
-                        RowDefaultEntryHolder.defaultEntry);
-            }
-            return row_;
-        }
-
-        private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.ByteString>
-        internalGetMutableRow() {
-            onChanged();
-            ;
-            if (row_ == null) {
-                row_ = com.google.protobuf.MapField.newMapField(
-                        RowDefaultEntryHolder.defaultEntry);
-            }
-            if (!row_.isMutable()) {
-                row_ = row_.copy();
-            }
-            return row_;
-        }
-
-        public int getRowCount() {
-            return internalGetRow().getMap().size();
-        }
-
-        /**
-         * <code>map&lt;string, bytes&gt; row = 1;</code>
-         */
-
-        @java.lang.Override
-        public boolean containsRow(
-                java.lang.String key) {
-            if (key == null) {
-                throw new NullPointerException("map key");
-            }
-            return internalGetRow().getMap().containsKey(key);
-        }
-
-        /**
-         * Use {@link #getRowMap()} instead.
-         */
-        @java.lang.Override
-        @java.lang.Deprecated
-        public java.util.Map<java.lang.String, com.google.protobuf.ByteString> getRow() {
-            return getRowMap();
-        }
-
-        /**
-         * <code>map&lt;string, bytes&gt; row = 1;</code>
-         */
-        @java.lang.Override
-
-        public java.util.Map<java.lang.String, com.google.protobuf.ByteString> getRowMap() {
-            return internalGetRow().getMap();
-        }
-
-        /**
-         * <code>map&lt;string, bytes&gt; row = 1;</code>
-         */
-        @java.lang.Override
-
-        public com.google.protobuf.ByteString getRowOrDefault(
-                java.lang.String key,
-                com.google.protobuf.ByteString defaultValue) {
-            if (key == null) {
-                throw new NullPointerException("map key");
-            }
-            java.util.Map<java.lang.String, com.google.protobuf.ByteString> map =
-                    internalGetRow().getMap();
-            return map.containsKey(key) ? map.get(key) : defaultValue;
-        }
-
-        /**
-         * <code>map&lt;string, bytes&gt; row = 1;</code>
-         */
-        @java.lang.Override
-
-        public com.google.protobuf.ByteString getRowOrThrow(
-                java.lang.String key) {
-            if (key == null) {
-                throw new NullPointerException("map key");
-            }
-            java.util.Map<java.lang.String, com.google.protobuf.ByteString> map =
-                    internalGetRow().getMap();
-            if (!map.containsKey(key)) {
-                throw new java.lang.IllegalArgumentException();
-            }
-            return map.get(key);
-        }
-
-        public Builder clearRow() {
-            internalGetMutableRow().getMutableMap()
-                    .clear();
-            return this;
-        }
-
-        /**
-         * <code>map&lt;string, bytes&gt; row = 1;</code>
-         */
-
-        public Builder removeRow(
-                java.lang.String key) {
-            if (key == null) {
-                throw new NullPointerException("map key");
-            }
-            internalGetMutableRow().getMutableMap()
-                    .remove(key);
-            return this;
-        }
-
-        /**
-         * Use alternate mutation accessors instead.
-         */
-        @java.lang.Deprecated
-        public java.util.Map<java.lang.String, com.google.protobuf.ByteString>
-        getMutableRow() {
-            return internalGetMutableRow().getMutableMap();
-        }
-
-        /**
-         * <code>map&lt;string, bytes&gt; row = 1;</code>
-         */
-        public Builder putRow(
-                java.lang.String key,
-                com.google.protobuf.ByteString value) {
-            if (key == null) {
-                throw new NullPointerException("map key");
-            }
-            if (value == null) {
-                throw new NullPointerException("map value");
-            }
-
-            internalGetMutableRow().getMutableMap()
-                    .put(key, value);
-            return this;
-        }
-
-        /**
-         * <code>map&lt;string, bytes&gt; row = 1;</code>
-         */
-
-        public Builder putAllRow(
-                java.util.Map<java.lang.String, com.google.protobuf.ByteString> values) {
-            internalGetMutableRow().getMutableMap()
-                    .putAll(values);
-            return this;
-        }
-
-        @java.lang.Override
-        public final Builder setUnknownFields(
-                final com.google.protobuf.UnknownFieldSet unknownFields) {
-            return super.setUnknownFields(unknownFields);
-        }
-
-        @java.lang.Override
-        public final Builder mergeUnknownFields(
-                final com.google.protobuf.UnknownFieldSet unknownFields) {
-            return super.mergeUnknownFields(unknownFields);
-        }
-
-
-        // @@protoc_insertion_point(builder_scope:Data)
-    }
-
-    // @@protoc_insertion_point(class_scope:Data)
-    private static final org.dbsyncer.storage.binlog.proto.Data DEFAULT_INSTANCE;
-
-    static {
-        DEFAULT_INSTANCE = new org.dbsyncer.storage.binlog.proto.Data();
-    }
-
-    public static org.dbsyncer.storage.binlog.proto.Data getDefaultInstance() {
-        return DEFAULT_INSTANCE;
-    }
-
-    private static final com.google.protobuf.Parser<Data>
-            PARSER = new com.google.protobuf.AbstractParser<Data>() {
-        @java.lang.Override
-        public Data parsePartialFrom(
-                com.google.protobuf.CodedInputStream input,
-                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-                throws com.google.protobuf.InvalidProtocolBufferException {
-            return new Data(input, extensionRegistry);
-        }
-    };
-
-    public static com.google.protobuf.Parser<Data> parser() {
-        return PARSER;
-    }
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<Data> getParserForType() {
-        return PARSER;
-    }
-
-    @java.lang.Override
-    public org.dbsyncer.storage.binlog.proto.Data getDefaultInstanceForType() {
-        return DEFAULT_INSTANCE;
-    }
-
-}
-

+ 0 - 50
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/DataOrBuilder.java

@@ -1,50 +0,0 @@
-// Generated by the protocol buffer compiler.  DO NOT EDIT!
-// source: BinlogMessageProto.proto
-
-package org.dbsyncer.storage.binlog.proto;
-
-public interface DataOrBuilder extends
-        // @@protoc_insertion_point(interface_extends:Data)
-        com.google.protobuf.MessageOrBuilder {
-
-    /**
-     * <code>map&lt;string, bytes&gt; row = 1;</code>
-     */
-    int getRowCount();
-
-    /**
-     * <code>map&lt;string, bytes&gt; row = 1;</code>
-     */
-    boolean containsRow(
-            java.lang.String key);
-
-    /**
-     * Use {@link #getRowMap()} instead.
-     */
-    @java.lang.Deprecated
-    java.util.Map<java.lang.String, com.google.protobuf.ByteString>
-    getRow();
-
-    /**
-     * <code>map&lt;string, bytes&gt; row = 1;</code>
-     */
-    java.util.Map<java.lang.String, com.google.protobuf.ByteString>
-    getRowMap();
-
-    /**
-     * <code>map&lt;string, bytes&gt; row = 1;</code>
-     */
-
-    /* nullable */
-    com.google.protobuf.ByteString getRowOrDefault(
-            java.lang.String key,
-            /* nullable */
-            com.google.protobuf.ByteString defaultValue);
-
-    /**
-     * <code>map&lt;string, bytes&gt; row = 1;</code>
-     */
-
-    com.google.protobuf.ByteString getRowOrThrow(
-            java.lang.String key);
-}

+ 5 - 5
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/proto/EventEnum.java

@@ -39,7 +39,7 @@ public enum EventEnum
 
   public final int getNumber() {
     if (this == UNRECOGNIZED) {
-      throw new java.lang.IllegalArgumentException(
+      throw new IllegalArgumentException(
               "Can't get the number of an unknown enum value.");
     }
     return value;
@@ -50,7 +50,7 @@ public enum EventEnum
    * @return The enum associated with the given numeric wire value.
    * @deprecated Use {@link #forNumber(int)} instead.
    */
-  @java.lang.Deprecated
+  @Deprecated
   public static EventEnum valueOf(int value) {
     return forNumber(value);
   }
@@ -88,7 +88,7 @@ public enum EventEnum
   public final com.google.protobuf.Descriptors.EnumValueDescriptor
   getValueDescriptor() {
     if (this == UNRECOGNIZED) {
-      throw new java.lang.IllegalStateException(
+      throw new IllegalStateException(
               "Can't get the descriptor of an unrecognized enum value.");
     }
     return getDescriptor().getValues().get(ordinal());
@@ -101,7 +101,7 @@ public enum EventEnum
 
   public static final com.google.protobuf.Descriptors.EnumDescriptor
   getDescriptor() {
-    return org.dbsyncer.storage.binlog.proto.BinlogMessageProto.getDescriptor().getEnumTypes().get(0);
+    return BinlogMessageProto.getDescriptor().getEnumTypes().get(0);
   }
 
   private static final EventEnum[] VALUES = values();
@@ -109,7 +109,7 @@ public enum EventEnum
   public static EventEnum valueOf(
           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
     if (desc.getType() != getDescriptor()) {
-      throw new java.lang.IllegalArgumentException(
+      throw new IllegalArgumentException(
               "EnumValueDescriptor is not for this type.");
     }
     if (desc.getIndex() == -1) {

+ 24 - 0
dbsyncer-storage/src/main/java/org/dbsyncer/storage/constant/BinlogConstant.java

@@ -0,0 +1,24 @@
+package org.dbsyncer.storage.constant;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/7/13 22:14
+ */
+public class BinlogConstant {
+
+    /**
+     * 属性
+     */
+    public static final String BINLOG_ID = "id";
+    public static final String BINLOG_STATUS = "s";
+    public static final String BINLOG_CONTENT = "c";
+    public static final String BINLOG_TIME = "t";
+
+    /**
+     * 状态类型
+     */
+    public static final int READY = 0;
+    public static final int PROCESSING = 1;
+
+}

+ 18 - 0
dbsyncer-storage/src/main/java/org/dbsyncer/storage/enums/BinlogStatusEnum.java

@@ -0,0 +1,18 @@
+package org.dbsyncer.storage.enums;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/6/29 20:31
+ */
+public enum BinlogStatusEnum {
+
+    /**
+     * 运行中
+     */
+    RUNNING,
+    /**
+     * 停止
+     */
+    STOP
+}

+ 24 - 0
dbsyncer-storage/src/main/java/org/dbsyncer/storage/enums/IndexFieldResolverEnum.java

@@ -0,0 +1,24 @@
+package org.dbsyncer.storage.enums;
+
+import org.dbsyncer.storage.lucene.IndexFieldResolver;
+
+public enum IndexFieldResolverEnum {
+
+    LONG((f) -> f.numericValue().longValue()),
+
+    INT((f) -> f.numericValue().intValue()),
+
+    STRING((f) -> f.stringValue()),
+
+    BINARY((f) -> f.binaryValue());
+
+    private IndexFieldResolver indexFieldResolver;
+
+    IndexFieldResolverEnum(IndexFieldResolver indexFieldResolver) {
+        this.indexFieldResolver = indexFieldResolver;
+    }
+
+    public IndexFieldResolver getIndexFieldResolver() {
+        return indexFieldResolver;
+    }
+}

+ 9 - 0
dbsyncer-storage/src/main/java/org/dbsyncer/storage/lucene/IndexFieldResolver.java

@@ -0,0 +1,9 @@
+package org.dbsyncer.storage.lucene;
+
+import org.apache.lucene.index.IndexableField;
+
+public interface IndexFieldResolver {
+
+    Object getValue(IndexableField field);
+
+}

+ 16 - 12
dbsyncer-storage/src/main/java/org/dbsyncer/storage/lucene/Shard.java

@@ -83,6 +83,12 @@ public class Shard {
         }
     }
 
+    public void deleteBatch(Term... terms) throws IOException {
+        if (null != terms) {
+            execute(terms, () -> indexWriter.deleteDocuments(terms));
+        }
+    }
+
     public void deleteAll() throws IOException {
         // Fix Bug: this IndexReader is closed. 直接删除文件
         close();
@@ -115,19 +121,9 @@ public class Shard {
         return analyzer;
     }
 
-    public List<Map> query(Query query) throws IOException {
-        final IndexSearcher searcher = getSearcher();
-        final TopDocs topDocs = searcher.search(query, MAX_SIZE);
-        return search(searcher, topDocs, new Option(), 1, 20);
-    }
-
-    public Paging query(Query query, Sort sort) throws IOException {
-        return query(new Option(query), 1, 20, sort);
-    }
-
     public Paging query(Option option, int pageNum, int pageSize, Sort sort) throws IOException {
         final IndexSearcher searcher = getSearcher();
-        final TopDocs topDocs = searcher.search(option.getQuery(), MAX_SIZE, sort);
+        final TopDocs topDocs = getTopDocs(searcher, option.getQuery(), MAX_SIZE, sort);
         Paging paging = new Paging(pageNum, pageSize);
         List<Map> data = search(searcher, topDocs, option, pageNum, pageSize);
         paging.setTotal(topDocs.totalHits);
@@ -135,6 +131,13 @@ public class Shard {
         return paging;
     }
 
+    private TopDocs getTopDocs(IndexSearcher searcher, Query query, int maxSize, Sort sort) throws IOException {
+        if (null != sort) {
+            return searcher.search(query, maxSize, sort);
+        }
+        return searcher.search(query, maxSize);
+    }
+
     /**
      * 执行查询
      *
@@ -184,7 +187,8 @@ public class Shard {
                     }
                 }
 
-                r.put(f.name(), f.stringValue());
+                // 解析value类型
+                r.put(f.name(), option.getFieldResolver(f.name()).getValue(f));
             }
             list.add(r);
         }

+ 4 - 4
dbsyncer-storage/src/main/java/org/dbsyncer/storage/binlog/Binlog.java → dbsyncer-storage/src/main/java/org/dbsyncer/storage/model/BinlogConfig.java

@@ -1,11 +1,11 @@
-package org.dbsyncer.storage.binlog;
+package org.dbsyncer.storage.model;
 
 /**
  * @author AE86
  * @version 1.0.0
  * @date 2022/6/19 23:03
  */
-public final class Binlog {
+public final class BinlogConfig {
     private String fileName;
     private long position = 0;
 
@@ -13,7 +13,7 @@ public final class Binlog {
         return fileName;
     }
 
-    public Binlog setFileName(String fileName) {
+    public BinlogConfig setFileName(String fileName) {
         this.fileName = fileName;
         return this;
     }
@@ -22,7 +22,7 @@ public final class Binlog {
         return position;
     }
 
-    public Binlog setPosition(long position) {
+    public BinlogConfig setPosition(long position) {
         this.position = position;
         return this;
     }

+ 70 - 0
dbsyncer-storage/src/main/java/org/dbsyncer/storage/model/BinlogIndex.java

@@ -0,0 +1,70 @@
+package org.dbsyncer.storage.model;
+
+import org.dbsyncer.storage.binlog.BinlogActuator;
+
+import java.io.IOException;
+import java.time.LocalDateTime;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/6/26 22:48
+ */
+public class BinlogIndex {
+    private String fileName;
+    private Set<BinlogActuator> lock;
+    private LocalDateTime createTime;
+    private LocalDateTime updateTime;
+
+    public BinlogIndex(String fileName, LocalDateTime createTime) {
+        this.fileName = fileName;
+        this.lock = new HashSet<>();
+        this.createTime = createTime;
+        this.updateTime = LocalDateTime.now();
+    }
+
+    public void addLock(BinlogActuator binlogActuator) {
+        this.lock.add(binlogActuator);
+    }
+
+    public void removeAllLock() throws IOException {
+        Iterator<BinlogActuator> iterator = lock.iterator();
+        while (iterator.hasNext()){
+            BinlogActuator next = iterator.next();
+            next.close();
+            iterator.remove();
+        }
+    }
+
+    public boolean isRunning() {
+        for (BinlogActuator actuator : lock){
+            if(actuator.isRunning()){
+                return true;
+            }
+        }
+        return false;
+    }
+
+    public boolean isFreeLock() {
+        return lock.isEmpty();
+    }
+
+    public String getFileName() {
+        return fileName;
+    }
+
+    public LocalDateTime getCreateTime() {
+        return createTime;
+    }
+
+    public LocalDateTime getUpdateTime() {
+        return updateTime;
+    }
+
+    public void setUpdateTime(LocalDateTime updateTime) {
+        this.updateTime = updateTime;
+    }
+}

+ 22 - 5
dbsyncer-storage/src/main/java/org/dbsyncer/storage/query/Option.java

@@ -5,8 +5,12 @@ import org.apache.lucene.search.highlight.Highlighter;
 import org.apache.lucene.search.highlight.QueryScorer;
 import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
 import org.dbsyncer.common.util.CollectionUtils;
+import org.dbsyncer.storage.enums.IndexFieldResolverEnum;
+import org.dbsyncer.storage.lucene.IndexFieldResolver;
 
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
 
@@ -17,13 +21,15 @@ import java.util.stream.Collectors;
  */
 public class Option {
 
-    private Query       query;
+    private Query query;
+
     private Set<String> highLightKeys;
-    private boolean     enableHighLightSearch;
+
+    private boolean enableHighLightSearch;
+
     private Highlighter highlighter = null;
 
-    public Option() {
-    }
+    private Map<String, IndexFieldResolverEnum> fieldResolvers = new LinkedHashMap<>();
 
     public Option(Query query) {
         this.query = query;
@@ -38,12 +44,23 @@ public class Option {
                     .collect(Collectors.toSet());
         }
         if (!CollectionUtils.isEmpty(highLightKeys)) {
-            enableHighLightSearch = true;
+            this.enableHighLightSearch = true;
             SimpleHTMLFormatter formatter = new SimpleHTMLFormatter("<span style='color:red'>", "</span>");
             highlighter = new Highlighter(formatter, new QueryScorer(query));
         }
     }
 
+    public IndexFieldResolver getFieldResolver(String name){
+        if(fieldResolvers.containsKey(name)){
+            return fieldResolvers.get(name).getIndexFieldResolver();
+        }
+        return IndexFieldResolverEnum.STRING.getIndexFieldResolver();
+    }
+
+    public void addIndexFieldResolverEnum(String name, IndexFieldResolverEnum fieldResolver){
+        fieldResolvers.putIfAbsent(name, fieldResolver);
+    }
+
     public Query getQuery() {
         return query;
     }

+ 5 - 5
dbsyncer-storage/src/main/java/org/dbsyncer/storage/support/DiskStorageServiceImpl.java

@@ -16,7 +16,7 @@ import org.dbsyncer.storage.lucene.Shard;
 import org.dbsyncer.storage.query.Option;
 import org.dbsyncer.storage.query.Param;
 import org.dbsyncer.storage.query.Query;
-import org.dbsyncer.storage.util.ParamsUtil;
+import org.dbsyncer.storage.util.DocumentUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -99,14 +99,14 @@ public class DiskStorageServiceImpl extends AbstractStorageService {
     @Override
     public void insert(StorageEnum type, String collection, Map params) throws IOException {
         createShardIfNotExist(collection);
-        Document doc = ParamsUtil.convertConfig2Doc(params);
+        Document doc = DocumentUtil.convertConfig2Doc(params);
         map.get(collection).insert(doc);
     }
 
     @Override
     public void update(StorageEnum type, String collection, Map params) throws IOException {
         createShardIfNotExist(collection);
-        Document doc = ParamsUtil.convertConfig2Doc(params);
+        Document doc = DocumentUtil.convertConfig2Doc(params);
         IndexableField field = doc.getField(ConfigConstant.CONFIG_MODEL_ID);
         map.get(collection).update(new Term(ConfigConstant.CONFIG_MODEL_ID, field.stringValue()), doc);
     }
@@ -131,14 +131,14 @@ public class DiskStorageServiceImpl extends AbstractStorageService {
     @Override
     public void insertLog(StorageEnum type, String collection, Map<String, Object> params) throws IOException {
         createShardIfNotExist(collection);
-        Document doc = ParamsUtil.convertLog2Doc(params);
+        Document doc = DocumentUtil.convertLog2Doc(params);
         map.get(collection).insert(doc);
     }
 
     @Override
     public void insertData(StorageEnum type, String collection, List<Map> list) throws IOException {
         createShardIfNotExist(collection);
-        List<Document> docs = list.stream().map(r -> ParamsUtil.convertData2Doc(r)).collect(Collectors.toList());
+        List<Document> docs = list.stream().map(r -> DocumentUtil.convertData2Doc(r)).collect(Collectors.toList());
         map.get(collection).insertBatch(docs);
     }
 

+ 32 - 49
dbsyncer-storage/src/main/java/org/dbsyncer/storage/support/MysqlStorageServiceImpl.java

@@ -13,10 +13,10 @@ import org.dbsyncer.connector.constant.ConnectorConstant;
 import org.dbsyncer.connector.constant.DatabaseConstant;
 import org.dbsyncer.connector.database.Database;
 import org.dbsyncer.connector.database.DatabaseConnectorMapper;
-import org.dbsyncer.connector.database.ds.SimpleConnection;
 import org.dbsyncer.connector.enums.ConnectorEnum;
 import org.dbsyncer.connector.enums.SetterEnum;
 import org.dbsyncer.connector.enums.SqlBuilderEnum;
+import org.dbsyncer.connector.util.DatabaseUtil;
 import org.dbsyncer.storage.AbstractStorageService;
 import org.dbsyncer.storage.StorageException;
 import org.dbsyncer.storage.constant.ConfigConstant;
@@ -34,8 +34,6 @@ import org.springframework.util.Assert;
 
 import javax.annotation.PostConstruct;
 import java.io.*;
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -66,6 +64,7 @@ public class MysqlStorageServiceImpl extends AbstractStorageService {
     private static final String TRUNCATE_TABLE = "TRUNCATE TABLE %s";
     private static final String TABLE_CREATE_TIME = "create_time";
     private static final String TABLE_UPDATE_TIME = "update_time";
+    private final Object LOCK = new Object();
 
     @Autowired
     private ConnectorFactory connectorFactory;
@@ -86,27 +85,7 @@ public class MysqlStorageServiceImpl extends AbstractStorageService {
         config.setConnectorType(ConnectorEnum.MYSQL.getType());
         connectorMapper = (DatabaseConnectorMapper) connectorFactory.connect(config);
         connector = (Database) connectorFactory.getConnector(connectorMapper);
-
-        // 获取数据库名称
-        database = connectorMapper.execute(databaseTemplate -> {
-            Connection conn = databaseTemplate.getConnection();
-            DatabaseMetaData metaData = conn.getMetaData();
-            String driverVersion = metaData.getDriverVersion();
-            String databaseProductVersion = metaData.getDatabaseProductVersion();
-            boolean driverThanMysql8 = StringUtil.startsWith(driverVersion, "mysql-connector-java-8");
-            boolean dbThanMysql8 = StringUtil.startsWith(databaseProductVersion, "8");
-            Assert.isTrue(driverThanMysql8 == dbThanMysql8, String.format("当前驱动%s和数据库%s版本不一致.", driverVersion, databaseProductVersion));
-
-            if(conn instanceof SimpleConnection){
-                SimpleConnection simpleConnection = (SimpleConnection) conn;
-                conn = simpleConnection.getConnection();
-            }
-            Class clazz = dbThanMysql8 ? conn.getClass() : conn.getClass().getSuperclass();
-            java.lang.reflect.Field field = clazz.getDeclaredField("database");
-            field.setAccessible(true);
-            Object value = field.get(conn);
-            return String.valueOf(value);
-        });
+        database = DatabaseUtil.getDatabaseName(config.getUrl());
 
         // 初始化表
         initTable();
@@ -114,20 +93,22 @@ public class MysqlStorageServiceImpl extends AbstractStorageService {
 
     @Override
     public Paging select(Query query) {
-        Executor executor = getExecutor(query.getType(), query.getCollection());
-        List<Object> queryArgs = new ArrayList<>();
-        List<Object> queryCountArgs = new ArrayList<>();
-        String querySql = buildQuerySql(query, executor, queryArgs);
-        String queryCountSql = buildQueryCountSql(query, executor, queryCountArgs);
-
-        List<Map<String, Object>> data = connectorMapper.execute(databaseTemplate -> databaseTemplate.queryForList(querySql, queryArgs.toArray()));
-        replaceHighLight(query, data);
-        Long total = connectorMapper.execute(databaseTemplate -> databaseTemplate.queryForObject(queryCountSql, queryCountArgs.toArray(), Long.class));
-
-        Paging paging = new Paging(query.getPageNum(), query.getPageSize());
-        paging.setData(data);
-        paging.setTotal(total);
-        return paging;
+        synchronized (LOCK){
+            Executor executor = getExecutor(query.getType(), query.getCollection());
+            List<Object> queryArgs = new ArrayList<>();
+            List<Object> queryCountArgs = new ArrayList<>();
+            String querySql = buildQuerySql(query, executor, queryArgs);
+            String queryCountSql = buildQueryCountSql(query, executor, queryCountArgs);
+
+            List<Map<String, Object>> data = connectorMapper.execute(databaseTemplate -> databaseTemplate.queryForList(querySql, queryArgs.toArray()));
+            replaceHighLight(query, data);
+            Long total = connectorMapper.execute(databaseTemplate -> databaseTemplate.queryForObject(queryCountSql, queryCountArgs.toArray(), Long.class));
+
+            Paging paging = new Paging(query.getPageNum(), query.getPageSize());
+            paging.setData(data);
+            paging.setTotal(total);
+            return paging;
+        }
     }
 
     @Override
@@ -155,17 +136,19 @@ public class MysqlStorageServiceImpl extends AbstractStorageService {
 
     @Override
     public void deleteAll(StorageEnum type, String table) {
-        Executor executor = getExecutor(type, table);
-        if (executor.isSystemType()) {
-            String sql = String.format(TRUNCATE_TABLE, PREFIX_TABLE.concat(table));
-            executeSql(sql);
-            return;
-        }
+        synchronized (LOCK){
+            Executor executor = getExecutor(type, table);
+            if (executor.isSystemType()) {
+                String sql = String.format(TRUNCATE_TABLE, PREFIX_TABLE.concat(table));
+                executeSql(sql);
+                return;
+            }
 
-        if (tables.containsKey(table)) {
-            tables.remove(table);
-            String sql = String.format(DROP_TABLE, PREFIX_TABLE.concat(table));
-            executeSql(sql);
+            if (tables.containsKey(table)) {
+                tables.remove(table);
+                String sql = String.format(DROP_TABLE, PREFIX_TABLE.concat(table));
+                executeSql(sql);
+            }
         }
     }
 
@@ -321,7 +304,7 @@ public class MysqlStorageServiceImpl extends AbstractStorageService {
         }
 
         List<Field> fields = executor.getFieldPairs().stream().map(p -> new Field(p.columnName, p.labelName)).collect(Collectors.toList());
-        final SqlBuilderConfig config = new SqlBuilderConfig(connector, table, ConfigConstant.CONFIG_MODEL_ID, fields, "", "");
+        final SqlBuilderConfig config = new SqlBuilderConfig(connector, "", table, ConfigConstant.CONFIG_MODEL_ID, fields, "", "");
 
         String query = SqlBuilderEnum.QUERY.getSqlBuilder().buildQuerySql(config);
         String insert = SqlBuilderEnum.INSERT.getSqlBuilder().buildSql(config);

+ 239 - 0
dbsyncer-storage/src/main/java/org/dbsyncer/storage/util/BinlogMessageUtil.java

@@ -0,0 +1,239 @@
+package org.dbsyncer.storage.util;
+
+import com.google.protobuf.ByteString;
+import oracle.sql.BLOB;
+import oracle.sql.CLOB;
+import oracle.sql.TIMESTAMP;
+import org.apache.commons.io.IOUtils;
+import org.dbsyncer.storage.binlog.BinlogColumnValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.InputStream;
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.sql.*;
+import java.util.BitSet;
+
+/**
+ * Java语言提供了八种基本类型,六种数字类型(四个整数型,两个浮点型),一种字符类型,一种布尔型。
+ * <p>
+ * <ol>
+ * <li>整数:包括int,short,byte,long</li>
+ * <li>浮点型:float,double</li>
+ * <li>字符:char</li>
+ * <li>布尔:boolean</li>
+ * </ol>
+ *
+ * <pre>
+ * 类型     长度     大小      最小值     最大值
+ * byte     1Byte    8-bit     -128       +127
+ * short    2Byte    16-bit    -2^15      +2^15-1
+ * int      4Byte    32-bit    -2^31      +2^31-1
+ * long     8Byte    64-bit    -2^63      +2^63-1
+ * float    4Byte    32-bit    IEEE754    IEEE754
+ * double   8Byte    64-bit    IEEE754    IEEE754
+ * char     2Byte    16-bit    Unicode 0  Unicode 2^16-1
+ * boolean  8Byte    64-bit
+ * </pre>
+ *
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/7/14 22:07
+ */
+public abstract class BinlogMessageUtil {
+
+    private static final Logger logger = LoggerFactory.getLogger(BinlogMessageUtil.class);
+
+    private static final ByteBuffer buffer = ByteBuffer.allocate(8);
+
+    private static final BinlogColumnValue value = new BinlogColumnValue();
+
+    public static ByteString serializeValue(Object v) {
+        String type = v.getClass().getName();
+        switch (type) {
+            // 字节
+            case "[B":
+                return ByteString.copyFrom((byte[]) v);
+
+            // 字符串
+            case "java.lang.String":
+                return ByteString.copyFromUtf8((String) v);
+
+            // 时间
+            case "java.sql.Timestamp":
+                buffer.clear();
+                Timestamp timestamp = (Timestamp) v;
+                buffer.putLong(timestamp.getTime());
+                buffer.flip();
+                return ByteString.copyFrom(buffer, 8);
+            case "java.sql.Date":
+                buffer.clear();
+                Date date = (Date) v;
+                buffer.putLong(date.getTime());
+                buffer.flip();
+                return ByteString.copyFrom(buffer, 8);
+            case "java.sql.Time":
+                buffer.clear();
+                Time time = (Time) v;
+                buffer.putLong(time.getTime());
+                buffer.flip();
+                return ByteString.copyFrom(buffer, 8);
+
+            // 数字
+            case "java.lang.Integer":
+                buffer.clear();
+                buffer.putInt((Integer) v);
+                buffer.flip();
+                return ByteString.copyFrom(buffer, 4);
+            case "java.lang.Long":
+                buffer.clear();
+                buffer.putLong((Long) v);
+                buffer.flip();
+                return ByteString.copyFrom(buffer, 8);
+            case "java.lang.Short":
+                buffer.clear();
+                buffer.putShort((Short) v);
+                buffer.flip();
+                return ByteString.copyFrom(buffer, 2);
+            case "java.lang.Float":
+                buffer.clear();
+                buffer.putFloat((Float) v);
+                buffer.flip();
+                return ByteString.copyFrom(buffer, 4);
+            case "java.lang.Double":
+                buffer.clear();
+                buffer.putDouble((Double) v);
+                buffer.flip();
+                return ByteString.copyFrom(buffer, 8);
+            case "java.math.BigDecimal":
+                BigDecimal bigDecimal = (BigDecimal) v;
+                return ByteString.copyFromUtf8(bigDecimal.toString());
+            case "java.util.BitSet":
+                BitSet bitSet = (BitSet) v;
+                return ByteString.copyFrom(bitSet.toByteArray());
+
+            // 布尔(1为true;0为false)
+            case "java.lang.Boolean":
+                buffer.clear();
+                Boolean b = (Boolean) v;
+                buffer.putShort((short) (b ? 1 : 0));
+                buffer.flip();
+                return ByteString.copyFrom(buffer, 2);
+            case "oracle.sql.TIMESTAMP":
+                buffer.clear();
+                TIMESTAMP timeStamp = (TIMESTAMP) v;
+                try {
+                    buffer.putLong(timeStamp.timestampValue().getTime());
+                } catch (SQLException e) {
+                    logger.error(e.getMessage());
+                }
+                buffer.flip();
+                return ByteString.copyFrom(buffer, 8);
+            case "oracle.sql.BLOB":
+                return ByteString.copyFrom(getBytes((BLOB) v));
+            case "oracle.sql.CLOB":
+                return ByteString.copyFrom(getBytes((CLOB) v));
+            default:
+                logger.error("Unsupported serialize value type:{}", type);
+                return null;
+        }
+    }
+
+    public static Object deserializeValue(int type, ByteString v) {
+        value.setValue(v);
+
+        if (value.isNull()) {
+            return null;
+        }
+
+        switch (type) {
+            // 字符串
+            case Types.VARCHAR:
+            case Types.LONGVARCHAR:
+            case Types.NVARCHAR:
+            case Types.NCHAR:
+            case Types.CHAR:
+                return value.asString();
+
+            // 时间
+            case Types.TIMESTAMP:
+                return value.asTimestamp();
+            case Types.TIME:
+                return value.asTime();
+            case Types.DATE:
+                return value.asDate();
+
+            // 数字
+            case Types.INTEGER:
+            case Types.TINYINT:
+                return value.asInteger();
+            case Types.SMALLINT:
+                return value.asShort();
+            case Types.BIGINT:
+                return value.asLong();
+            case Types.FLOAT:
+            case Types.REAL:
+                return value.asFloat();
+            case Types.DOUBLE:
+                return value.asDouble();
+            case Types.DECIMAL:
+            case Types.NUMERIC:
+                return value.asBigDecimal();
+
+            // 布尔
+            case Types.BOOLEAN:
+                return value.asBoolean();
+
+            // 字节
+            case Types.BIT:
+            case Types.BINARY:
+            case Types.VARBINARY:
+            case Types.LONGVARBINARY:
+            // 二进制对象
+            case Types.NCLOB:
+            case Types.CLOB:
+            case Types.BLOB:
+                return value.asByteArray();
+
+            // 暂不支持
+            case Types.ROWID:
+                return null;
+
+            default:
+                return null;
+        }
+    }
+
+    private static byte[] getBytes(BLOB blob) {
+        InputStream is = null;
+        byte[] b = null;
+        try {
+            is = blob.getBinaryStream();
+            b = new byte[(int) blob.length()];
+            int read = is.read(b);
+            if(-1 == read){
+                return b;
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage());
+        } finally {
+            IOUtils.closeQuietly(is);
+        }
+        return b;
+    }
+
+    private static byte[] getBytes(CLOB clob) {
+        try {
+            long length = clob.length();
+            if (length > 0) {
+                return clob.getSubString(1, (int) length).getBytes(Charset.defaultCharset());
+            }
+        } catch (SQLException e) {
+            logger.error(e.getMessage());
+        }
+        return new byte[0];
+    }
+
+}

+ 21 - 2
dbsyncer-storage/src/main/java/org/dbsyncer/storage/util/ParamsUtil.java → dbsyncer-storage/src/main/java/org/dbsyncer/storage/util/DocumentUtil.java

@@ -1,6 +1,8 @@
 package org.dbsyncer.storage.util;
 
 import org.apache.lucene.document.*;
+import org.apache.lucene.util.BytesRef;
+import org.dbsyncer.storage.constant.BinlogConstant;
 import org.dbsyncer.storage.constant.ConfigConstant;
 import org.springframework.util.Assert;
 
@@ -40,8 +42,9 @@ import java.util.Map;
  * @version 1.0.0
  * @date 2019/11/19 22:07
  */
-public abstract class ParamsUtil {
-    private ParamsUtil(){}
+public abstract class DocumentUtil {
+    private DocumentUtil() {
+    }
 
     public static Document convertConfig2Doc(Map params) {
         Assert.notNull(params, "Params can not be null.");
@@ -109,4 +112,20 @@ public abstract class ParamsUtil {
         return doc;
     }
 
+    /**
+     * Converts one binlog queue record into a Lucene Document.
+     * <p>
+     * Pairing idiom: {@code IntPoint}/{@code LongPoint} fields are index-only, so each is
+     * paired with a {@code StoredField} carrying the same value so it can be read back from
+     * search hits; the extra DocValues fields enable binary retrieval (content) and numeric
+     * sorting (update time).
+     *
+     * @param messageId  unique id of the queued binlog message (exact-match searchable, stored)
+     * @param status     processing status code (range-searchable, stored)
+     * @param bytes      serialized message payload (stored, plus binary doc values)
+     * @param updateTime last-modified epoch millis (range-searchable, stored, sortable)
+     * @return the populated document, ready to be written to the index
+     */
+    public static Document convertBinlog2Doc(String messageId, int status, BytesRef bytes, long updateTime) {
+        Document doc = new Document();
+        // id: indexed as an exact (non-tokenized) term and stored for retrieval
+        doc.add(new StringField(BinlogConstant.BINLOG_ID, messageId, Field.Store.YES));
+
+        doc.add(new IntPoint(BinlogConstant.BINLOG_STATUS, status));
+        doc.add(new StoredField(BinlogConstant.BINLOG_STATUS, status));
+
+        doc.add(new BinaryDocValuesField(BinlogConstant.BINLOG_CONTENT, bytes));
+        doc.add(new StoredField(BinlogConstant.BINLOG_CONTENT, bytes));
+
+        doc.add(new LongPoint(BinlogConstant.BINLOG_TIME, updateTime));
+        doc.add(new StoredField(BinlogConstant.BINLOG_TIME, updateTime));
+        // NumericDocValues makes BINLOG_TIME usable as a sort key
+        doc.add(new NumericDocValuesField(BinlogConstant.BINLOG_TIME, updateTime));
+        return doc;
+    }
+
 }

+ 2 - 2
dbsyncer-storage/src/main/proto/BinlogMessageProto.proto

@@ -8,7 +8,7 @@ option optimize_for = SPEED;
 message BinlogMessage {
     string table_group_id = 1;
     EventEnum event = 2;
-    repeated Data data = 3;
+    BinlogMap data = 3;
 }
 
 enum EventEnum {
@@ -17,6 +17,6 @@ enum EventEnum {
     DELETE = 2;
 }
 
-message Data {
+message BinlogMap {
     map<string, bytes> row = 1;
 }

+ 0 - 75
dbsyncer-storage/src/main/test/BinlogMessageTest.java

@@ -1,75 +0,0 @@
-import com.google.protobuf.ByteString;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.dbsyncer.common.util.JsonUtil;
-import org.dbsyncer.storage.binlog.Binlog;
-import org.dbsyncer.storage.binlog.BinlogPipeline;
-import org.dbsyncer.storage.binlog.proto.BinlogMessage;
-import org.dbsyncer.storage.binlog.proto.Data;
-import org.dbsyncer.storage.binlog.proto.EventEnum;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.charset.Charset;
-
-/**
- * @author AE86
- * @version 1.0.0
- * @date 2022/6/18 23:46
- */
-public class BinlogMessageTest {
-
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
-    private BinlogPipeline pipeline;
-
-    @Before
-    public void init() throws IOException {
-        File dir = new File(System.getProperty("user.dir")).getParentFile();
-        String path = new StringBuilder(dir.getAbsolutePath()).append(File.separatorChar)
-                .append("data").append(File.separatorChar)
-                .append("binlog").append(File.separatorChar)
-                .append("WriterBinlog").append(File.separatorChar)
-                .toString();
-        File configPath = new File(path + "binlog.config");
-        String configJson = FileUtils.readFileToString(configPath, Charset.defaultCharset());
-        Binlog binlog = JsonUtil.jsonToObj(configJson, Binlog.class);
-        pipeline = new BinlogPipeline(new File(path + binlog.getFileName()), binlog.getPosition());
-    }
-
-    @After
-    public void close() {
-        IOUtils.closeQuietly(pipeline);
-    }
-
-    @Test
-    public void testBinlogMessage() throws IOException {
-        write("123456", "abc");
-        write("000111", "xyz");
-        write("888999", "jkl");
-
-        byte[] line;
-        while (null != (line = pipeline.readLine())) {
-            BinlogMessage binlogMessage = BinlogMessage.parseFrom(line);
-            logger.info(binlogMessage.toString());
-        }
-    }
-
-    private void write(String tableGroupId, String key) throws IOException {
-        BinlogMessage build = BinlogMessage.newBuilder()
-                .setTableGroupId(tableGroupId)
-                .setEvent(EventEnum.UPDATE)
-                .addData(Data.newBuilder().putRow(key, ByteString.copyFromUtf8("hello,中国")).build())
-                .build();
-        byte[] bytes = build.toByteArray();
-        logger.info("序列化长度:{}", bytes.length);
-        logger.info("{}", bytes);
-        pipeline.write(build);
-    }
-
-}

Some files were not shown because too many files changed in this diff