Browse Source

Merge remote-tracking branch 'origin/master' into v_2.0

穿云 2 months ago
parent
commit
3899c21818
100 changed files with 1691 additions and 2527 deletions
  1. 1 1
      README.md
  2. 8 0
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/ConnectorService.java
  3. 9 0
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/TableGroupService.java
  4. 2 2
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/checker/AbstractChecker.java
  5. 2 94
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/checker/impl/mapping/MappingChecker.java
  6. 1 0
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/checker/impl/system/SystemConfigChecker.java
  7. 34 7
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/checker/impl/tablegroup/TableGroupChecker.java
  8. 18 12
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/ConnectorServiceImpl.java
  9. 23 13
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/DataSyncServiceImpl.java
  10. 13 8
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/MappingServiceImpl.java
  11. 10 31
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/MetricReporter.java
  12. 3 4
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/MonitorServiceImpl.java
  13. 18 6
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/PluginServiceImpl.java
  14. 63 22
      dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/TableGroupServiceImpl.java
  15. 1 1
      dbsyncer-common/src/main/java/org/dbsyncer/common/model/Result.java
  16. 7 2
      dbsyncer-common/src/main/java/org/dbsyncer/common/scheduled/impl/ScheduledTaskServiceImpl.java
  17. 4 0
      dbsyncer-common/src/main/java/org/dbsyncer/common/util/StringUtil.java
  18. 0 1
      dbsyncer-connector/dbsyncer-connector-base/pom.xml
  19. 31 12
      dbsyncer-connector/dbsyncer-connector-base/src/main/java/org/dbsyncer/connector/base/ConnectorFactory.java
  20. 18 14
      dbsyncer-connector/dbsyncer-connector-base/src/test/java/ConnectionTest.java
  21. 22 33
      dbsyncer-connector/dbsyncer-connector-elasticsearch/src/main/java/org/dbsyncer/connector/elasticsearch/ElasticsearchConnector.java
  22. 11 19
      dbsyncer-connector/dbsyncer-connector-file/src/main/java/org/dbsyncer/connector/file/FileConnector.java
  23. 4 4
      dbsyncer-connector/dbsyncer-connector-file/src/main/java/org/dbsyncer/connector/file/model/FileResolver.java
  24. 6 17
      dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/KafkaConnector.java
  25. 8 6
      dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/serialization/JsonToMapDeserializer.java
  26. 1 2
      dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/DQLMySQLConnector.java
  27. 21 0
      dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/MySQLConnector.java
  28. 2 2
      dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/cdc/MySQLListener.java
  29. 1 17
      dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/schema/MySQLSchemaResolver.java
  30. 0 51
      dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/schema/support/MySQLBooleanType.java
  31. 20 0
      dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/schema/support/MySQLByteType.java
  32. 19 0
      dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/schema/support/MySQLShortType.java
  33. 1 1
      dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/storage/MySQLStorageService.java
  34. 1 1
      dbsyncer-connector/dbsyncer-connector-mysql/src/main/resources/public/connector/addDqlMySQL.html
  35. 1 1
      dbsyncer-connector/dbsyncer-connector-mysql/src/main/resources/public/connector/addMySQL.html
  36. 1 2
      dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/DQLOracleConnector.java
  37. 1 2
      dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/OracleConnector.java
  38. 4 10
      dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/cdc/OracleListener.java
  39. 0 383
      dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/dcn/DBChangeNotification.java
  40. 0 19
      dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/dcn/RowEventListener.java
  41. 115 117
      dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/logminer/LogMiner.java
  42. 137 118
      dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/logminer/LogMinerHelper.java
  43. 0 29
      dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/model/DCNEvent.java
  44. 0 104
      dbsyncer-connector/dbsyncer-connector-oracle/src/test/java/DBChangeNotificationTest.java
  45. 0 94
      dbsyncer-connector/dbsyncer-connector-oracle/src/test/java/LinkedBlockingQueueTest.java
  46. 1 2
      dbsyncer-connector/dbsyncer-connector-postgresql/src/main/java/org/dbsyncer/connector/postgresql/DQLPostgreSQLConnector.java
  47. 1 2
      dbsyncer-connector/dbsyncer-connector-postgresql/src/main/java/org/dbsyncer/connector/postgresql/PostgreSQLConnector.java
  48. 2 2
      dbsyncer-connector/dbsyncer-connector-postgresql/src/main/java/org/dbsyncer/connector/postgresql/decoder/impl/PgOutputMessageDecoder.java
  49. 1 3
      dbsyncer-connector/dbsyncer-connector-sqlite/src/main/java/org/dbsyncer/connector/sqlite/DqlSQLiteConnector.java
  50. 1 2
      dbsyncer-connector/dbsyncer-connector-sqlite/src/main/java/org/dbsyncer/connector/sqlite/SQLiteConnector.java
  51. 0 0
      dbsyncer-connector/dbsyncer-connector-sqlite/src/main/resources/static/img/SQLite.png
  52. 1 2
      dbsyncer-connector/dbsyncer-connector-sqlserver/src/main/java/org/dbsyncer/connector/sqlserver/DQLSqlServerConnector.java
  53. 23 2
      dbsyncer-connector/dbsyncer-connector-sqlserver/src/main/java/org/dbsyncer/connector/sqlserver/SqlServerConnector.java
  54. 15 20
      dbsyncer-manager/src/main/java/org/dbsyncer/manager/impl/FullPuller.java
  55. 42 50
      dbsyncer-manager/src/main/java/org/dbsyncer/manager/impl/IncrementPuller.java
  56. 5 9
      dbsyncer-manager/src/main/java/org/dbsyncer/manager/impl/PreloadTemplate.java
  57. 2 4
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/CacheService.java
  58. 26 0
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/TableGroupContext.java
  59. 9 38
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/consumer/ParserConsumer.java
  60. 0 87
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/consumer/impl/LogConsumer.java
  61. 0 42
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/consumer/impl/QuartzConsumer.java
  62. 4 5
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/AlterStrategy.java
  63. 5 23
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/DDLParser.java
  64. 17 76
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/alter/AddStrategy.java
  65. 17 15
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/alter/ChangeStrategy.java
  66. 12 50
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/alter/DropStrategy.java
  67. 16 20
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/alter/ModifyStrategy.java
  68. 119 68
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/impl/DDLParserImpl.java
  69. 0 9
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/enums/MetaEnum.java
  70. 5 5
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/event/RefreshOffsetEvent.java
  71. 19 10
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/AbstractBufferActuator.java
  72. 60 27
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/BufferActuatorRouter.java
  73. 118 92
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/GeneralBufferActuator.java
  74. 7 7
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/TableGroupBufferActuator.java
  75. 3 3
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/impl/CacheServiceImpl.java
  76. 35 61
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/impl/OperationTemplate.java
  77. 39 39
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/impl/ParserComponentImpl.java
  78. 2 3
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/impl/ProfileComponentImpl.java
  79. 85 0
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/impl/TableGroupContextImpl.java
  80. 16 5
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/AbstractWriter.java
  81. 0 166
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/FieldPicker.java
  82. 40 0
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/Group.java
  83. 2 2
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/Meta.java
  84. 144 27
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/Picker.java
  85. 1 1
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/QueryConfig.java
  86. 13 0
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/SystemConfig.java
  87. 47 0
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/TableGroupPicker.java
  88. 0 3
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/Task.java
  89. 7 14
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/WriterRequest.java
  90. 3 15
      dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/WriterResponse.java
  91. 5 5
      dbsyncer-plugin/src/main/java/org/dbsyncer/plugin/PluginFactory.java
  92. 2 71
      dbsyncer-plugin/src/main/java/org/dbsyncer/plugin/impl/FullPluginContext.java
  93. 27 30
      dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/config/DDLConfig.java
  94. 3 3
      dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/config/DatabaseConfig.java
  95. 1 1
      dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/config/ListenerConfig.java
  96. 0 67
      dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/config/WriterBatchConfig.java
  97. 18 31
      dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/connector/AbstractConnector.java
  98. 1 3
      dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/connector/database/AbstractDQLConnector.java
  99. 24 36
      dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/connector/database/AbstractDatabaseConnector.java
  100. 3 7
      dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/connector/database/sqlbuilder/SqlBuilderQuery.java

+ 1 - 1
README.md

@@ -21,7 +21,7 @@
 | 连接器        | 数据源 | 目标源 | 支持版本(包含以下) |
 |------------|---|---|-----------------------|
 | MySQL      | ✔ |  ✔ | 5.7.19以上 |
-| Oracle     | ✔ |  ✔ | 11g-19c |
+| Oracle     | ✔ |  ✔ | 10g-19c |
 | SqlServer  | ✔ |  ✔ | 2008以上 |
 | PostgreSQL | ✔ |  ✔ | 9.5.25以上 |
 | ES         | ✔ |  ✔ | 6.0.0-8.15.3 |

+ 8 - 0
dbsyncer-biz/src/main/java/org/dbsyncer/biz/ConnectorService.java

@@ -78,4 +78,12 @@ public interface ConnectorService {
      * @return
      */
     boolean isAlive(String id);
+
+    /**
+     * 获取位点信息
+     *
+     * @param params
+     * @return
+     */
+    Object getPosition(String params);
 }

+ 9 - 0
dbsyncer-biz/src/main/java/org/dbsyncer/biz/TableGroupService.java

@@ -3,6 +3,7 @@
  */
 package org.dbsyncer.biz;
 
+import org.dbsyncer.parser.model.Mapping;
 import org.dbsyncer.parser.model.TableGroup;
 
 import java.util.List;
@@ -60,4 +61,12 @@ public interface TableGroupService {
      */
     List<TableGroup> getTableGroupAll(String mappingId);
 
+    /**
+     * 更新元信息
+     *
+     * @param mapping
+     * @param metaSnapshot
+     */
+    void updateMeta(Mapping mapping, String metaSnapshot);
+
 }

+ 2 - 2
dbsyncer-biz/src/main/java/org/dbsyncer/biz/checker/AbstractChecker.java

@@ -22,9 +22,9 @@ import org.springframework.util.Assert;
 import javax.annotation.Resource;
 import java.time.Instant;
 import java.util.HashMap;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 /**
  * @author AE86
@@ -74,7 +74,7 @@ public abstract class AbstractChecker implements Checker {
     protected void modifySuperConfigModel(AbstractConfigModel model, Map<String, String> params) {
         // 全局参数
         String mappingParams = params.get("params");
-        model.setParams(StringUtil.isNotBlank(mappingParams) ? JsonUtil.jsonToObj(mappingParams, Map.class) : new LinkedHashMap());
+        model.setParams(StringUtil.isNotBlank(mappingParams) ? JsonUtil.jsonToObj(mappingParams, Map.class) : new ConcurrentHashMap<>());
 
         // 过滤条件
         String filterJson = params.get("filter");

+ 2 - 94
dbsyncer-biz/src/main/java/org/dbsyncer/biz/checker/impl/mapping/MappingChecker.java

@@ -6,15 +6,12 @@ package org.dbsyncer.biz.checker.impl.mapping;
 import org.dbsyncer.biz.checker.AbstractChecker;
 import org.dbsyncer.biz.checker.MappingConfigChecker;
 import org.dbsyncer.biz.checker.impl.tablegroup.TableGroupChecker;
-import org.dbsyncer.common.util.CollectionUtils;
-import org.dbsyncer.common.util.JsonUtil;
 import org.dbsyncer.common.util.NumberUtil;
 import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.parser.ProfileComponent;
 import org.dbsyncer.parser.model.ConfigModel;
 import org.dbsyncer.parser.model.Mapping;
 import org.dbsyncer.parser.model.Meta;
-import org.dbsyncer.parser.model.TableGroup;
 import org.dbsyncer.sdk.config.ListenerConfig;
 import org.dbsyncer.sdk.constant.ConfigConstant;
 import org.dbsyncer.sdk.enums.ListenerTypeEnum;
@@ -25,12 +22,8 @@ import org.springframework.stereotype.Component;
 import org.springframework.util.Assert;
 
 import javax.annotation.Resource;
-import java.time.Instant;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.stream.Collectors;
 
 /**
  * @author AE86
@@ -113,9 +106,8 @@ public class MappingChecker extends AbstractChecker {
         // 修改高级配置:过滤条件/转换配置/插件配置
         this.modifySuperConfigModel(mapping, params);
 
-        // 更新meta
-        String metaSnapshot = params.get("metaSnapshot");
-        updateMeta(mapping, metaSnapshot);
+        // 合并关联的映射关系配置
+        tableGroupChecker.batchMergeConfig(mapping, params);
 
         return mapping;
     }
@@ -131,48 +123,6 @@ public class MappingChecker extends AbstractChecker {
         mapping.setMetaId(id);
     }
 
-    /**
-     * 更新元信息
-     *
-     * @param mapping
-     */
-    public void updateMeta(Mapping mapping) {
-        updateMeta(mapping, null);
-    }
-
-    /**
-     * 合并关联的映射关系配置
-     *
-     * @param mapping
-     * @param params
-     */
-    public void batchMergeTableGroupConfig(Mapping mapping, Map<String, String> params) {
-        List<TableGroup> groupAll = profileComponent.getTableGroupAll(mapping.getId());
-        if (!CollectionUtils.isEmpty(groupAll)) {
-            // 手动排序
-            String[] sortedTableGroupIds = StringUtil.split(params.get("sortedTableGroupIds"), StringUtil.VERTICAL_LINE);
-            if (null != sortedTableGroupIds && sortedTableGroupIds.length > 0) {
-                Map<String, TableGroup> tableGroupMap = groupAll.stream().collect(Collectors.toMap(TableGroup::getId, f -> f, (k1, k2) -> k1));
-                groupAll.clear();
-                int size = sortedTableGroupIds.length;
-                int i = size;
-                while (i > 0) {
-                    TableGroup g = tableGroupMap.get(sortedTableGroupIds[size - i]);
-                    Assert.notNull(g, "Invalid sorted tableGroup.");
-                    g.setIndex(i);
-                    groupAll.add(g);
-                    i--;
-                }
-            }
-
-            // 合并配置
-            for (TableGroup g : groupAll) {
-                tableGroupChecker.mergeConfig(mapping, g);
-                profileComponent.editConfigModel(g);
-            }
-        }
-    }
-
     /**
      * 修改监听器配置
      *
@@ -188,46 +138,4 @@ public class MappingChecker extends AbstractChecker {
         listener.setEnableDDL(StringUtil.isNotBlank(params.get("enableDDL")));
     }
 
-    /**
-     * 更新元信息
-     *
-     * @param mapping
-     * @param metaSnapshot
-     */
-    private void updateMeta(Mapping mapping, String metaSnapshot) {
-        Meta meta = profileComponent.getMeta(mapping.getMetaId());
-        Assert.notNull(meta, "驱动meta不存在.");
-
-        // 清空状态
-        meta.clear();
-
-        // 手动配置增量点
-        if (StringUtil.isNotBlank(metaSnapshot)) {
-            Map snapshot = JsonUtil.jsonToObj(metaSnapshot, HashMap.class);
-            if (!CollectionUtils.isEmpty(snapshot)) {
-                meta.setSnapshot(snapshot);
-            }
-        }
-
-        getMetaTotal(meta, mapping.getModel());
-
-        meta.setUpdateTime(Instant.now().toEpochMilli());
-        profileComponent.editConfigModel(meta);
-    }
-
-    private void getMetaTotal(Meta meta, String model) {
-        // 全量同步
-        if (ModelEnum.isFull(model)) {
-            // 统计tableGroup总条数
-            AtomicLong count = new AtomicLong(0);
-            List<TableGroup> groupAll = profileComponent.getTableGroupAll(meta.getMappingId());
-            if (!CollectionUtils.isEmpty(groupAll)) {
-                for (TableGroup g : groupAll) {
-                    count.getAndAdd(g.getSourceTable().getCount());
-                }
-            }
-            meta.setTotal(count);
-        }
-    }
-
 }

+ 1 - 0
dbsyncer-biz/src/main/java/org/dbsyncer/biz/checker/impl/system/SystemConfigChecker.java

@@ -56,6 +56,7 @@ public class SystemConfigChecker extends AbstractChecker {
         params.put("enableStorageWriteFull", StringUtil.isNotBlank(params.get("enableStorageWriteFull")) ? "true" : "false");
         params.put("enableCDN", StringUtil.isNotBlank(params.get("enableCDN")) ? "true" : "false");
         params.put("enableWatermark", StringUtil.isNotBlank(params.get("enableWatermark")) ? "true" : "false");
+        params.put("enableSchemaResolver", StringUtil.isNotBlank(params.get("enableSchemaResolver")) ? "true" : "false");
         String watermark = params.get("watermark");
         if (StringUtil.isNotBlank(watermark)) {
             Assert.isTrue(watermark.length() <= 64, "允许水印内容最多输入64个字.");

+ 34 - 7
dbsyncer-biz/src/main/java/org/dbsyncer/biz/checker/impl/tablegroup/TableGroupChecker.java

@@ -27,13 +27,7 @@ import org.springframework.stereotype.Component;
 import org.springframework.util.Assert;
 
 import javax.annotation.Resource;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.stream.Collectors;
 
@@ -162,6 +156,39 @@ public class TableGroupChecker extends AbstractChecker {
         return new Table(tableName, metaInfo.getTableType(), metaInfo.getColumn(), metaInfo.getSql(), metaInfo.getIndexType());
     }
 
+    /**
+     * 合并表配置
+     *
+     * @param mapping
+     * @param params
+     */
+    public void batchMergeConfig(Mapping mapping, Map<String, String> params) {
+        List<TableGroup> groupAll = profileComponent.getTableGroupAll(mapping.getId());
+        if (!CollectionUtils.isEmpty(groupAll)) {
+            // 手动排序
+            String[] sortedTableGroupIds = StringUtil.split(params.get("sortedTableGroupIds"), StringUtil.VERTICAL_LINE);
+            if (null != sortedTableGroupIds && sortedTableGroupIds.length > 0) {
+                Map<String, TableGroup> tableGroupMap = groupAll.stream().collect(Collectors.toMap(TableGroup::getId, f -> f, (k1, k2) -> k1));
+                groupAll.clear();
+                int size = sortedTableGroupIds.length;
+                int i = size;
+                while (i > 0) {
+                    TableGroup g = tableGroupMap.get(sortedTableGroupIds[size - i]);
+                    Assert.notNull(g, "Invalid sorted tableGroup.");
+                    g.setIndex(i);
+                    groupAll.add(g);
+                    i--;
+                }
+            }
+
+            // 合并配置
+            for (TableGroup g : groupAll) {
+                mergeConfig(mapping, g);
+                profileComponent.editConfigModel(g);
+            }
+        }
+    }
+
     private void checkRepeatedTable(String mappingId, String sourceTable, String targetTable) {
         List<TableGroup> list = profileComponent.getTableGroupAll(mappingId);
         if (!CollectionUtils.isEmpty(list)) {

+ 18 - 12
dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/ConnectorServiceImpl.java

@@ -16,8 +16,9 @@ import org.dbsyncer.parser.ProfileComponent;
 import org.dbsyncer.parser.model.ConfigModel;
 import org.dbsyncer.parser.model.Connector;
 import org.dbsyncer.parser.model.Mapping;
-import org.dbsyncer.sdk.model.ConnectorConfig;
+import org.dbsyncer.sdk.connector.ConnectorInstance;
 import org.dbsyncer.sdk.constant.ConfigConstant;
+import org.dbsyncer.sdk.model.ConnectorConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.stereotype.Service;
@@ -25,13 +26,12 @@ import org.springframework.util.Assert;
 
 import javax.annotation.Resource;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashSet;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
 
 /**
@@ -44,7 +44,7 @@ public class ConnectorServiceImpl extends BaseServiceImpl implements ConnectorSe
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
-    private Map<String, Boolean> health = new LinkedHashMap<>();
+    private final Map<String, Boolean> health = new ConcurrentHashMap<>();
 
     @Resource
     private ProfileComponent profileComponent;
@@ -117,17 +117,16 @@ public class ConnectorServiceImpl extends BaseServiceImpl implements ConnectorSe
 
     @Override
     public List<Connector> getConnectorAll() {
-        List<Connector> list = profileComponent.getConnectorAll()
+        return profileComponent.getConnectorAll()
                 .stream()
                 .sorted(Comparator.comparing(Connector::getUpdateTime).reversed())
                 .collect(Collectors.toList());
-        return list;
     }
 
     @Override
     public List<String> getConnectorTypeAll() {
         ArrayList<String> connectorTypes = new ArrayList<>(connectorFactory.getConnectorTypeAll());
-        Collections.sort(connectorTypes, Comparator.comparing(String::toString));
+        connectorTypes.sort(Comparator.comparing(String::toString));
         return connectorTypes;
     }
 
@@ -150,14 +149,14 @@ public class ConnectorServiceImpl extends BaseServiceImpl implements ConnectorSe
 
         // 移除删除的连接器
         Set<String> remove = new HashSet<>();
-        health.keySet().forEach(k -> {
-            if (!exist.contains(k)) {
-                remove.add(k);
+        for (Map.Entry<String, Boolean> entry : health.entrySet()) {
+            if (!exist.contains(entry.getKey())) {
+                remove.add(entry.getKey());
             }
-        });
+        }
 
         if (!CollectionUtils.isEmpty(remove)) {
-            remove.forEach(k -> health.remove(k));
+            remove.forEach(health::remove);
         }
     }
 
@@ -166,6 +165,13 @@ public class ConnectorServiceImpl extends BaseServiceImpl implements ConnectorSe
         return health.containsKey(id) && health.get(id);
     }
 
+    @Override
+    public Object getPosition(String id) {
+        Connector connector = getConnector(id);
+        ConnectorInstance connectorInstance = connectorFactory.connect(connector.getConfig());
+        return connectorFactory.getPosition(connectorInstance);
+    }
+
     private boolean isAlive(ConnectorConfig config) {
         try {
             return connectorFactory.isAlive(config);

+ 23 - 13
dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/DataSyncServiceImpl.java

@@ -19,14 +19,15 @@ import org.dbsyncer.parser.flush.impl.BufferActuatorRouter;
 import org.dbsyncer.parser.model.Meta;
 import org.dbsyncer.parser.model.Picker;
 import org.dbsyncer.parser.model.TableGroup;
+import org.dbsyncer.sdk.constant.ConfigConstant;
+import org.dbsyncer.sdk.constant.ConnectorConstant;
 import org.dbsyncer.sdk.enums.StorageEnum;
-import org.dbsyncer.sdk.listener.event.RowChangedEvent;
-import org.dbsyncer.sdk.model.Field;
 import org.dbsyncer.sdk.filter.FieldResolver;
 import org.dbsyncer.sdk.filter.Query;
+import org.dbsyncer.sdk.listener.event.RowChangedEvent;
+import org.dbsyncer.sdk.model.Field;
 import org.dbsyncer.sdk.storage.StorageService;
 import org.dbsyncer.storage.binlog.proto.BinlogMap;
-import org.dbsyncer.sdk.constant.ConfigConstant;
 import org.dbsyncer.storage.util.BinlogMessageUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -39,9 +40,9 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
 
 /**
@@ -111,18 +112,25 @@ public class DataSyncServiceImpl implements DataSyncService {
             return Collections.EMPTY_MAP;
         }
 
-        // 3、反序列
+        // 3、获取DDL
         Map<String, Object> target = new HashMap<>();
-        final Picker picker = new Picker(tableGroup.getFieldMapping());
-        final Map<String, Field> fieldMap = picker.getTargetFieldMap();
         BinlogMap message = BinlogMap.parseFrom(bytes);
+        String event = (String) row.get(ConfigConstant.DATA_EVENT);
+        if (StringUtil.equals(event, ConnectorConstant.OPERTION_ALTER)) {
+            message.getRowMap().forEach((k, v) -> target.put(k, v.toStringUtf8()));
+            return target;
+        }
+
+        // 4、反序列
+        final Picker picker = new Picker(tableGroup);
+        final Map<String, Field> fieldMap = picker.getTargetFieldMap();
         message.getRowMap().forEach((k, v) -> {
             if (fieldMap.containsKey(k)) {
                 try {
                     Object val = BinlogMessageUtil.deserializeValue(fieldMap.get(k).getType(), v);
                     // 处理二进制对象显示
                     if (prettyBytes) {
-                        if (null != val && val instanceof byte[]) {
+                        if (val instanceof byte[]) {
                             byte[] b = (byte[]) val;
                             if (b.length > 128) {
                                 target.put(k, String.format("byte[%d]", b.length));
@@ -163,12 +171,14 @@ public class DataSyncServiceImpl implements DataSyncService {
         }
         TableGroup tableGroup = profileComponent.getTableGroup(tableGroupId);
         String sourceTableName = tableGroup.getSourceTable().getName();
-        RowChangedEvent changedEvent = new RowChangedEvent(sourceTableName, event, Collections.EMPTY_LIST);
+
         // 转换为源字段
-        final Picker picker = new Picker(tableGroup.getFieldMapping());
-        changedEvent.setChangedRow(picker.pickSourceData(binlogData));
+        final Picker picker = new Picker(tableGroup);
+        List<Object> changedRow = picker.pickSourceData(binlogData);
+        RowChangedEvent changedEvent = new RowChangedEvent(sourceTableName, event, changedRow);
+
         // 执行同步是否成功
-        bufferActuatorRouter.execute(metaId, tableGroupId, changedEvent);
+        bufferActuatorRouter.execute(metaId, changedEvent);
         storageService.remove(StorageEnum.DATA, metaId, messageId);
         // 更新失败数
         Meta meta = profileComponent.getMeta(metaId);
@@ -181,7 +191,7 @@ public class DataSyncServiceImpl implements DataSyncService {
 
     private Map getData(String metaId, String messageId) {
         Query query = new Query(1, 1);
-        Map<String, FieldResolver> fieldResolvers = new LinkedHashMap<>();
+        Map<String, FieldResolver> fieldResolvers = new ConcurrentHashMap<>();
         fieldResolvers.put(ConfigConstant.BINLOG_DATA, (FieldResolver<IndexableField>) field -> field.binaryValue().bytes);
         query.setFieldResolverMap(fieldResolvers);
         query.addFilter(ConfigConstant.CONFIG_MODEL_ID, messageId);

+ 13 - 8
dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/MappingServiceImpl.java

@@ -19,6 +19,7 @@ import org.dbsyncer.connector.base.ConnectorFactory;
 import org.dbsyncer.manager.ManagerFactory;
 import org.dbsyncer.parser.LogType;
 import org.dbsyncer.parser.ProfileComponent;
+import org.dbsyncer.parser.TableGroupContext;
 import org.dbsyncer.parser.model.ConfigModel;
 import org.dbsyncer.parser.model.Connector;
 import org.dbsyncer.parser.model.Mapping;
@@ -76,16 +77,18 @@ public class MappingServiceImpl extends BaseServiceImpl implements MappingServic
     @Resource
     private ConnectorFactory connectorFactory;
 
+    @Resource
+    private TableGroupContext tableGroupContext;
+
     @Override
     public String add(Map<String, String> params) {
         ConfigModel model = mappingChecker.checkAddConfigModel(params);
-        log(LogType.MappingLog.INSERT, (Mapping) model);
+        log(LogType.MappingLog.INSERT, model);
 
         String id = profileComponent.addConfigModel(model);
 
         // 匹配相似表 on
-        String autoMatchTable = params.get("autoMatchTable");
-        if (StringUtil.isNotBlank(autoMatchTable)) {
+        if (StringUtil.isNotBlank(params.get("autoMatchTable"))) {
             matchSimilarTable(model);
         }
 
@@ -131,7 +134,8 @@ public class MappingServiceImpl extends BaseServiceImpl implements MappingServic
             Mapping model = (Mapping) mappingChecker.checkEditConfigModel(params);
             log(LogType.MappingLog.UPDATE, model);
 
-            mappingChecker.batchMergeTableGroupConfig(model, params);
+            // 更新meta
+            tableGroupService.updateMeta(mapping, params.get("metaSnapshot"));
             return profileComponent.editConfigModel(model);
         }
     }
@@ -158,6 +162,9 @@ public class MappingServiceImpl extends BaseServiceImpl implements MappingServic
                 groupList.forEach(t -> profileComponent.removeTableGroup(t.getId()));
             }
 
+            // 删除驱动表映射关系
+            tableGroupContext.clear(metaId);
+
             // 删除驱动
             profileComponent.removeConfigModel(id);
             log(LogType.MappingLog.DELETE, mapping);
@@ -199,12 +206,11 @@ public class MappingServiceImpl extends BaseServiceImpl implements MappingServic
 
     @Override
     public List<MappingVo> getMappingAll() {
-        List<MappingVo> list = profileComponent.getMappingAll()
+        return profileComponent.getMappingAll()
                 .stream()
-                .map(m -> convertMapping2Vo(m))
+                .map(this::convertMapping2Vo)
                 .sorted(Comparator.comparing(MappingVo::getUpdateTime).reversed())
                 .collect(Collectors.toList());
-        return list;
     }
 
     @Override
@@ -330,7 +336,6 @@ public class MappingServiceImpl extends BaseServiceImpl implements MappingServic
                 }
             }
         }
-        mappingChecker.updateMeta(mapping);
     }
 
     private void clearMetaIfFinished(String metaId) {

+ 10 - 31
dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/MetricReporter.java

@@ -6,11 +6,7 @@ package org.dbsyncer.biz.impl;
 import org.dbsyncer.biz.enums.BufferActuatorMetricEnum;
 import org.dbsyncer.biz.enums.StatisticEnum;
 import org.dbsyncer.biz.enums.ThreadPoolMetricEnum;
-import org.dbsyncer.biz.model.AppReportMetric;
-import org.dbsyncer.biz.model.MappingReportMetric;
-import org.dbsyncer.biz.model.MetricResponse;
-import org.dbsyncer.biz.model.MetricResponseInfo;
-import org.dbsyncer.biz.model.Sample;
+import org.dbsyncer.biz.model.*;
 import org.dbsyncer.biz.vo.HistoryStackVo;
 import org.dbsyncer.common.metric.Bucket;
 import org.dbsyncer.common.metric.TimeRegistry;
@@ -23,10 +19,8 @@ import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.parser.ProfileComponent;
 import org.dbsyncer.parser.flush.BufferActuator;
 import org.dbsyncer.parser.flush.impl.BufferActuatorRouter;
-import org.dbsyncer.parser.flush.impl.TableGroupBufferActuator;
 import org.dbsyncer.parser.model.Mapping;
 import org.dbsyncer.parser.model.Meta;
-import org.dbsyncer.parser.model.TableGroup;
 import org.dbsyncer.sdk.constant.ConfigConstant;
 import org.dbsyncer.sdk.constant.ConnectorConstant;
 import org.dbsyncer.sdk.enums.StorageEnum;
@@ -44,12 +38,7 @@ import java.sql.Timestamp;
 import java.time.Instant;
 import java.time.LocalDateTime;
 import java.time.temporal.ChronoUnit;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.function.Consumer;
@@ -93,9 +82,7 @@ public class MetricReporter implements ScheduledTaskJob {
 
     private final MappingReportMetric mappingReportMetric = new MappingReportMetric();
 
-    private AppReportMetric report = new AppReportMetric();
-
-    private final int SHOW_BUFFER_ACTUATOR_SIZE = 7;
+    private final AppReportMetric report = new AppReportMetric();
 
     @PostConstruct
     private void init() {
@@ -114,24 +101,16 @@ public class MetricReporter implements ScheduledTaskJob {
             bufferActuatorRouter.getRouter().forEach((metaId, group) -> {
                 Meta meta = profileComponent.getMeta(metaId);
                 Mapping mapping = profileComponent.getMapping(meta.getMappingId());
-                group.forEach((k, bufferActuator) -> {
-                    if (bufferActuator instanceof TableGroupBufferActuator) {
-                        TableGroupBufferActuator actuator = bufferActuator;
-                        TableGroup tableGroup = profileComponent.getTableGroup(actuator.getTableGroupId());
-                        String metricName = new StringBuilder()
-                                .append(tableGroup.getSourceTable().getName())
-                                .append(" > ")
-                                .append(tableGroup.getTargetTable().getName()).toString();
-                        tableList.add(collect(bufferActuator, tableGroupCode, mapping.getName(), metricName));
-                    }
-                });
+                group.forEach((k, bufferActuator) ->
+                    tableList.add(collect(bufferActuator, tableGroupCode, mapping.getName(), bufferActuator.getTableName()))
+                );
             });
-            List<MetricResponseInfo> sortList = tableList.stream()
+            list.addAll(tableList.stream()
                     .sorted(Comparator.comparing(MetricResponseInfo::getQueueUp).reversed())
-                    .collect(Collectors.toList());
-            list.addAll(sortList.size() <= SHOW_BUFFER_ACTUATOR_SIZE ? sortList : sortList.subList(0, SHOW_BUFFER_ACTUATOR_SIZE));
+                    .limit(7)
+                    .collect(Collectors.toList()));
         }
-        return list.stream().map(info -> info.getResponse()).collect(Collectors.toList());
+        return list.stream().map(MetricResponseInfo::getResponse).collect(Collectors.toList());
     }
 
     public AppReportMetric getAppReportMetric() {

+ 3 - 4
dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/MonitorServiceImpl.java

@@ -14,7 +14,6 @@ import org.dbsyncer.biz.metric.MetricDetailFormatter;
 import org.dbsyncer.biz.metric.impl.CpuMetricDetailFormatter;
 import org.dbsyncer.biz.metric.impl.DiskMetricDetailFormatter;
 import org.dbsyncer.biz.metric.impl.DoubleRoundMetricDetailFormatter;
-import org.dbsyncer.biz.metric.impl.GCMetricDetailFormatter;
 import org.dbsyncer.biz.metric.impl.MemoryMetricDetailFormatter;
 import org.dbsyncer.biz.metric.impl.ValueMetricDetailFormatter;
 import org.dbsyncer.biz.model.AppReportMetric;
@@ -59,9 +58,9 @@ import java.time.LocalDateTime;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
 
 /**
@@ -95,7 +94,7 @@ public class MonitorServiceImpl extends BaseServiceImpl implements MonitorServic
     @Resource
     private SystemConfigService systemConfigService;
 
-    private Map<String, MetricDetailFormatter> metricDetailFormatterMap = new LinkedHashMap<>();
+    private Map<String, MetricDetailFormatter> metricDetailFormatterMap = new ConcurrentHashMap<>();
 
     @PostConstruct
     private void init() {
@@ -262,7 +261,7 @@ public class MonitorServiceImpl extends BaseServiceImpl implements MonitorServic
             return new Paging(pageNum, pageSize);
         }
         Query query = new Query(pageNum, pageSize);
-        Map<String, FieldResolver> fieldResolvers = new LinkedHashMap<>();
+        Map<String, FieldResolver> fieldResolvers = new ConcurrentHashMap<>();
         fieldResolvers.put(ConfigConstant.BINLOG_DATA, (FieldResolver<IndexableField>) field -> field.binaryValue().bytes);
         query.setFieldResolverMap(fieldResolvers);
 

+ 18 - 6
dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/PluginServiceImpl.java

@@ -8,6 +8,7 @@ import org.dbsyncer.biz.PluginService;
 import org.dbsyncer.biz.vo.PluginVo;
 import org.dbsyncer.common.util.CollectionUtils;
 import org.dbsyncer.common.util.StringUtil;
+import org.dbsyncer.parser.ParserException;
 import org.dbsyncer.parser.ProfileComponent;
 import org.dbsyncer.parser.LogService;
 import org.dbsyncer.parser.LogType;
@@ -22,9 +23,9 @@ import org.springframework.util.Assert;
 
 import javax.annotation.Resource;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
 
 /**
@@ -90,7 +91,7 @@ public class PluginServiceImpl implements PluginService {
     }
 
     private Map<String, List<String>> getPluginClassNameMap() {
-        Map<String, List<String>> map = new HashMap<>();
+        Map<String, List<String>> map = new ConcurrentHashMap<>();
         List<Mapping> mappingAll = profileComponent.getMappingAll();
         if (CollectionUtils.isEmpty(mappingAll)) {
             return map;
@@ -99,8 +100,7 @@ public class PluginServiceImpl implements PluginService {
         for (Mapping m : mappingAll) {
             Plugin plugin = m.getPlugin();
             if (null != plugin) {
-                map.putIfAbsent(plugin.getClassName(), new ArrayList<>());
-                map.get(plugin.getClassName()).add(m.getName());
+                putPluginMap(map, plugin.getClassName(), m.getName());
                 continue;
             }
 
@@ -111,8 +111,7 @@ public class PluginServiceImpl implements PluginService {
             for (TableGroup t : tableGroupAll) {
                 Plugin p = t.getPlugin();
                 if (null != p) {
-                    map.putIfAbsent(p.getClassName(), new ArrayList<>());
-                    map.get(p.getClassName()).add(m.getName());
+                    putPluginMap(map, p.getClassName(), m.getName());
                     break;
                 }
             }
@@ -120,4 +119,17 @@ public class PluginServiceImpl implements PluginService {
 
         return map;
     }
+
+    private void putPluginMap(Map<String, List<String>> map, String className, String name) {
+        map.compute(className, (k,v) -> {
+            if (v == null) {
+                try {
+                    return new ArrayList<>();
+                } catch (Exception e) {
+                    throw new ParserException(e);
+                }
+            }
+            return v;
+        }).add(name);
+    }
 }

+ 63 - 22
dbsyncer-biz/src/main/java/org/dbsyncer/biz/impl/TableGroupServiceImpl.java

@@ -4,25 +4,25 @@
 package org.dbsyncer.biz.impl;
 
 import org.dbsyncer.biz.TableGroupService;
-import org.dbsyncer.biz.checker.Checker;
 import org.dbsyncer.biz.checker.impl.tablegroup.TableGroupChecker;
 import org.dbsyncer.common.util.CollectionUtils;
+import org.dbsyncer.common.util.JsonUtil;
 import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.parser.LogType;
 import org.dbsyncer.parser.ProfileComponent;
 import org.dbsyncer.parser.model.Mapping;
+import org.dbsyncer.parser.model.Meta;
 import org.dbsyncer.parser.model.TableGroup;
 import org.dbsyncer.sdk.constant.ConfigConstant;
+import org.dbsyncer.sdk.enums.ModelEnum;
 import org.dbsyncer.sdk.model.Field;
 import org.springframework.stereotype.Service;
 import org.springframework.util.Assert;
 
 import javax.annotation.Resource;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.time.Instant;
+import java.util.*;
+import java.util.concurrent.atomic.AtomicLong;
 import java.util.stream.Stream;
 
 /**
@@ -34,7 +34,7 @@ import java.util.stream.Stream;
 public class TableGroupServiceImpl extends BaseServiceImpl implements TableGroupService {
 
     @Resource
-    private Checker tableGroupChecker;
+    private TableGroupChecker tableGroupChecker;
 
     @Resource
     private ProfileComponent profileComponent;
@@ -42,7 +42,8 @@ public class TableGroupServiceImpl extends BaseServiceImpl implements TableGroup
     @Override
     public String add(Map<String, String> params) {
         String mappingId = params.get("mappingId");
-        assertRunning(profileComponent.getMapping(mappingId));
+        Mapping mapping = profileComponent.getMapping(mappingId);
+        assertRunning(mapping);
 
         synchronized (LOCK) {
             // table1, table2
@@ -63,7 +64,9 @@ public class TableGroupServiceImpl extends BaseServiceImpl implements TableGroup
             }
 
             // 合并驱动公共字段
-            mergeMappingColumn(mappingId);
+            mergeMappingColumn(mapping);
+            // 更新meta
+            updateMeta(mapping, null);
             return 1 < tableSize ? String.valueOf(tableSize) : id;
         }
     }
@@ -73,12 +76,15 @@ public class TableGroupServiceImpl extends BaseServiceImpl implements TableGroup
         String id = params.get(ConfigConstant.CONFIG_MODEL_ID);
         TableGroup tableGroup = profileComponent.getTableGroup(id);
         Assert.notNull(tableGroup, "Can not find tableGroup.");
-        assertRunning(profileComponent.getMapping(tableGroup.getMappingId()));
+        Mapping mapping = profileComponent.getMapping(tableGroup.getMappingId());
+        assertRunning(mapping);
 
         TableGroup model = (TableGroup) tableGroupChecker.checkEditConfigModel(params);
         log(LogType.TableGroupLog.UPDATE, model);
-
-        return profileComponent.editTableGroup(model);
+        profileComponent.editTableGroup(model);
+        // 更新meta
+        updateMeta(mapping, null);
+        return id;
     }
 
     @Override
@@ -87,9 +93,7 @@ public class TableGroupServiceImpl extends BaseServiceImpl implements TableGroup
         Assert.notNull(tableGroup, "Can not find tableGroup.");
         assertRunning(profileComponent.getMapping(tableGroup.getMappingId()));
 
-        TableGroupChecker checker = (TableGroupChecker) tableGroupChecker;
-        checker.refreshTableFields(tableGroup);
-
+        tableGroupChecker.refreshTableFields(tableGroup);
         return profileComponent.editTableGroup(tableGroup);
     }
 
@@ -97,7 +101,8 @@ public class TableGroupServiceImpl extends BaseServiceImpl implements TableGroup
     public boolean remove(String mappingId, String ids) {
         Assert.hasText(mappingId, "Mapping id can not be null");
         Assert.hasText(ids, "TableGroup ids can not be null");
-        assertRunning(profileComponent.getMapping(mappingId));
+        Mapping mapping = profileComponent.getMapping(mappingId);
+        assertRunning(mapping);
 
         // 批量删除表
         Stream.of(StringUtil.split(ids, ",")).parallel().forEach(id -> {
@@ -107,7 +112,9 @@ public class TableGroupServiceImpl extends BaseServiceImpl implements TableGroup
         });
 
         // 合并驱动公共字段
-        mergeMappingColumn(mappingId);
+        mergeMappingColumn(mapping);
+        // 更新meta
+        updateMeta(mapping, null);
 
         // 重置排序
         resetTableGroupAllIndex(mappingId);
@@ -126,6 +133,43 @@ public class TableGroupServiceImpl extends BaseServiceImpl implements TableGroup
         return profileComponent.getSortedTableGroupAll(mappingId);
     }
 
+    @Override
+    public void updateMeta(Mapping mapping, String metaSnapshot) {
+        Meta meta = profileComponent.getMeta(mapping.getMetaId());
+        Assert.notNull(meta, "驱动meta不存在.");
+
+        // 清空状态
+        meta.clear();
+
+        // 手动配置增量点
+        if (StringUtil.isNotBlank(metaSnapshot)) {
+            Map snapshot = JsonUtil.jsonToObj(metaSnapshot, HashMap.class);
+            if (!CollectionUtils.isEmpty(snapshot)) {
+                meta.setSnapshot(snapshot);
+            }
+        }
+
+        getMetaTotal(meta, mapping.getModel());
+
+        meta.setUpdateTime(Instant.now().toEpochMilli());
+        profileComponent.editConfigModel(meta);
+    }
+
+    private void getMetaTotal(Meta meta, String model) {
+        // 全量同步
+        if (ModelEnum.isFull(model)) {
+            // 统计tableGroup总条数
+            AtomicLong count = new AtomicLong(0);
+            List<TableGroup> groupAll = profileComponent.getTableGroupAll(meta.getMappingId());
+            if (!CollectionUtils.isEmpty(groupAll)) {
+                for (TableGroup g : groupAll) {
+                    count.getAndAdd(g.getSourceTable().getCount());
+                }
+            }
+            meta.setTotal(count);
+        }
+    }
+
     private void resetTableGroupAllIndex(String mappingId) {
         synchronized (LOCK) {
             List<TableGroup> list = profileComponent.getSortedTableGroupAll(mappingId);
@@ -140,11 +184,8 @@ public class TableGroupServiceImpl extends BaseServiceImpl implements TableGroup
         }
     }
 
-    private void mergeMappingColumn(String mappingId) {
-        List<TableGroup> groups = profileComponent.getTableGroupAll(mappingId);
-
-        Mapping mapping = profileComponent.getMapping(mappingId);
-        Assert.notNull(mapping, "mapping not exist.");
+    private void mergeMappingColumn(Mapping mapping) {
+        List<TableGroup> groups = profileComponent.getTableGroupAll(mapping.getId());
 
         List<Field> sourceColumn = null;
         List<Field> targetColumn = null;

+ 1 - 1
dbsyncer-common/src/main/java/org/dbsyncer/common/model/Result.java

@@ -18,7 +18,7 @@ public class Result<T> {
     /**
      * 错误日志
      */
-    private StringBuffer error = new StringBuffer();
+    private final StringBuffer error = new StringBuffer();
 
     /**
      * 驱动表映射关系ID

+ 7 - 2
dbsyncer-common/src/main/java/org/dbsyncer/common/scheduled/impl/ScheduledTaskServiceImpl.java

@@ -32,7 +32,7 @@ public class ScheduledTaskServiceImpl implements ScheduledTaskService, Disposabl
     @Resource
     private ThreadPoolTaskScheduler taskScheduler;
 
-    private Map<String, ScheduledFuture> map = new ConcurrentHashMap<>();
+    private final Map<String, ScheduledFuture> map = new ConcurrentHashMap<>();
 
     @Override
     public void start(String key, String cron, ScheduledTaskJob job) {
@@ -73,7 +73,12 @@ public class ScheduledTaskServiceImpl implements ScheduledTaskService, Disposabl
             logger.error(msg);
             throw new CommonException(msg);
         }
-        map.putIfAbsent(key, scheduledFutureMapper.apply());
+        map.compute(key, (k,v) -> {
+            if (v == null) {
+                return scheduledFutureMapper.apply();
+            }
+            return v;
+        });
     }
 
     @Override

+ 4 - 0
dbsyncer-common/src/main/java/org/dbsyncer/common/util/StringUtil.java

@@ -47,6 +47,10 @@ public abstract class StringUtil {
         return StringUtils.isNotBlank(cs);
     }
 
+    public static String getIfBlank(final String str, final String defaultStr) {
+        return isNotBlank(str) ? str : defaultStr;
+    }
+
     public static String[] split(String str, String separatorChars) {
         return StringUtils.split(str, separatorChars);
     }

+ 0 - 1
dbsyncer-connector/dbsyncer-connector-base/pom.xml

@@ -68,7 +68,6 @@
             <version>${project.parent.version}</version>
         </dependency>
 
-
         <!-- dbsyncer-connector-sqlserver -->
         <dependency>
             <groupId>org.ghi</groupId>

+ 31 - 12
dbsyncer-connector/dbsyncer-connector-base/src/main/java/org/dbsyncer/connector/base/ConnectorFactory.java

@@ -7,21 +7,29 @@ import org.dbsyncer.common.model.Result;
 import org.dbsyncer.common.util.CollectionUtils;
 import org.dbsyncer.sdk.config.CommandConfig;
 import org.dbsyncer.sdk.config.DDLConfig;
-import org.dbsyncer.sdk.config.WriterBatchConfig;
 import org.dbsyncer.sdk.connector.AbstractConnector;
 import org.dbsyncer.sdk.connector.ConnectorInstance;
 import org.dbsyncer.sdk.listener.Listener;
 import org.dbsyncer.sdk.model.ConnectorConfig;
 import org.dbsyncer.sdk.model.MetaInfo;
 import org.dbsyncer.sdk.model.Table;
+import org.dbsyncer.sdk.plugin.PluginContext;
 import org.dbsyncer.sdk.plugin.ReaderContext;
+import org.dbsyncer.sdk.schema.SchemaResolver;
 import org.dbsyncer.sdk.spi.ConnectorService;
 import org.springframework.beans.factory.DisposableBean;
 import org.springframework.stereotype.Component;
 import org.springframework.util.Assert;
 
 import javax.annotation.PostConstruct;
-import java.util.*;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.ServiceLoader;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
 /**
@@ -137,6 +145,11 @@ public class ConnectorFactory implements DisposableBean {
         return getConnectorService(connectorInstance.getConfig()).getMetaInfo(connectorInstance, tableName);
     }
 
+    public Object getPosition(ConnectorInstance connectorInstance) {
+        Assert.notNull(connectorInstance, "ConnectorInstance can not be null.");
+        return getConnectorService(connectorInstance.getConfig()).getPosition(connectorInstance);
+    }
+
     /**
      * 获取连接器同步参数
      *
@@ -166,7 +179,8 @@ public class ConnectorFactory implements DisposableBean {
         return getConnectorService(connectorInstance.getConfig()).getCount(connectorInstance, command);
     }
 
-    public Result reader(ConnectorInstance connectorInstance, ReaderContext context) {
+    public Result reader(ReaderContext context) {
+        ConnectorInstance connectorInstance = context.getSourceConnectorInstance();
         Assert.notNull(connectorInstance, "ConnectorInstance can not null");
         Assert.notNull(context, "ReaderContext can not null");
         Result result = getConnectorService(connectorInstance.getConfig()).reader(connectorInstance, context);
@@ -174,23 +188,28 @@ public class ConnectorFactory implements DisposableBean {
         return result;
     }
 
-    public Result writer(ConnectorInstance connectorInstance, WriterBatchConfig config) {
-        Assert.notNull(connectorInstance, "ConnectorInstance can not null");
-        Assert.notNull(config, "WriterBatchConfig can not null");
-        ConnectorService connector = getConnectorService(connectorInstance.getConfig());
-        if (connector instanceof AbstractConnector) {
-            AbstractConnector conn = (AbstractConnector) connector;
+    public Result writer(PluginContext context) {
+        ConnectorInstance targetInstance = context.getTargetConnectorInstance();
+        Assert.notNull(targetInstance, "targetConnectorInstance can not null");
+        ConnectorService targetConnector = getConnectorService(targetInstance.getConfig());
+        if (targetConnector instanceof AbstractConnector) {
+            AbstractConnector conn = (AbstractConnector) targetConnector;
             try {
-                conn.convertProcessBeforeWriter(connectorInstance, config);
+                // 支持标准解析器
+                if (context.isEnableSchemaResolver() && targetConnector.getSchemaResolver() != null) {
+                    conn.convertProcessBeforeWriter(context, targetConnector.getSchemaResolver());
+                } else {
+                    conn.convertProcessBeforeWriter(context, targetInstance);
+                }
             } catch (Exception e) {
                 Result result = new Result();
                 result.getError().append(e.getMessage());
-                result.addFailData(config.getData());
+                result.addFailData(context.getTargetList());
                 return result;
             }
         }
 
-        Result result = connector.writer(connectorInstance, config);
+        Result result = targetConnector.writer(targetInstance, context);
         Assert.notNull(result, "Connector writer batch result can not null");
         return result;
     }

+ 18 - 14
dbsyncer-connector/dbsyncer-connector-base/src/test/java/ConnectionTest.java

@@ -28,12 +28,14 @@ import java.time.Instant;
 import java.time.LocalDateTime;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.BrokenBarrierException;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
 
 /**
  * @Author AE86
@@ -121,6 +123,17 @@ public class ConnectionTest {
         logger.info("test end");
     }
 
+    @Test
+    public void testQuery() {
+        final DatabaseConnectorInstance connectorInstance = new DatabaseConnectorInstance(createMysqlConfig());
+        // 3、执行SQL
+        String querySql = "SELECT * from test_schema where id = ?";
+        Object[] args = new Object[1];
+        args[0] = 9999999;
+        List<Map<String, Object>> list = connectorInstance.execute(databaseTemplate -> databaseTemplate.queryForList(querySql, args));
+        logger.info("test list={}", list);
+    }
+
     @Test
     public void testBatchInsert() {
         final DatabaseConnectorInstance connectorInstance = new DatabaseConnectorInstance(createMysqlConfig());
@@ -321,22 +334,13 @@ public class ConnectionTest {
         int total = dataList.size();
         int taskSize = total % batchSize == 0 ? total / batchSize : total / batchSize + 1;
         final CountDownLatch latch = new CountDownLatch(taskSize);
-        int fromIndex = 0;
-        int toIndex = batchSize;
+        int offset = 0;
         for (int i = 0; i < taskSize; i++) {
-            final List<Object[]> data;
-            if (toIndex > total) {
-                toIndex = fromIndex + (total % batchSize);
-                data = dataList.subList(fromIndex, toIndex);
-            } else {
-                data = dataList.subList(fromIndex, toIndex);
-                fromIndex += batchSize;
-                toIndex += batchSize;
-            }
-
+            List<Object[]> slice = dataList.stream().skip(offset).limit(batchSize).collect(Collectors.toList());
+            offset += batchSize;
             pool.submit(() -> {
                 try {
-                    connectorInstance.execute(databaseTemplate -> databaseTemplate.batchUpdate(sql, data));
+                    connectorInstance.execute(databaseTemplate -> databaseTemplate.batchUpdate(sql, slice));
                 } catch (Exception e) {
                     logger.error(e.getMessage());
                 } finally {
@@ -433,7 +437,7 @@ public class ConnectionTest {
 
     private DatabaseConfig createMysqlConfig() {
         DatabaseConfig config = new DatabaseConfig();
-        config.setUrl("jdbc:mysql://127.0.0.1:3305/test?rewriteBatchedStatements=true&useUnicode=true&characterEncoding=UTF8&serverTimezone=Asia/Shanghai&useSSL=false&verifyServerCertificate=false&autoReconnect=true&failOverReadOnly=false");
+        config.setUrl("jdbc:mysql://127.0.0.1:3305/test?rewriteBatchedStatements=true&useUnicode=true&characterEncoding=UTF8&serverTimezone=Asia/Shanghai&useSSL=false&verifyServerCertificate=false&autoReconnect=true&failOverReadOnly=false&tinyInt1isBit=false");
         config.setUsername("root");
         config.setPassword("123");
         config.setDriverClassName("com.mysql.cj.jdbc.Driver");

+ 22 - 33
dbsyncer-connector/dbsyncer-connector-elasticsearch/src/main/java/org/dbsyncer/connector/elasticsearch/ElasticsearchConnector.java

@@ -18,7 +18,6 @@ import org.dbsyncer.connector.elasticsearch.schema.ESOtherValueMapper;
 import org.dbsyncer.connector.elasticsearch.util.ESUtil;
 import org.dbsyncer.connector.elasticsearch.validator.ESConfigValidator;
 import org.dbsyncer.sdk.config.CommandConfig;
-import org.dbsyncer.sdk.config.WriterBatchConfig;
 import org.dbsyncer.sdk.connector.AbstractConnector;
 import org.dbsyncer.sdk.connector.ConfigValidator;
 import org.dbsyncer.sdk.connector.ConnectorInstance;
@@ -31,6 +30,7 @@ import org.dbsyncer.sdk.model.Field;
 import org.dbsyncer.sdk.model.Filter;
 import org.dbsyncer.sdk.model.MetaInfo;
 import org.dbsyncer.sdk.model.Table;
+import org.dbsyncer.sdk.plugin.PluginContext;
 import org.dbsyncer.sdk.plugin.ReaderContext;
 import org.dbsyncer.sdk.spi.ConnectorService;
 import org.dbsyncer.sdk.util.PrimaryKeyUtil;
@@ -65,7 +65,14 @@ import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.sql.Types;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
 
 /**
@@ -79,18 +86,10 @@ public final class ElasticsearchConnector extends AbstractConnector implements C
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
-    private final String TYPE = "Elasticsearch";
-
     public static final String _SOURCE_INDEX = "_source_index";
-
-    public static final String _TARGET_INDEX = "_target_index";
-
-    public static final String _TYPE = "_type";
-
-    private static final int MAX_SIZE = 10000;
-
-    private final Map<String, FilterMapper> filters = new LinkedHashMap<>();
-
+    private final String _TARGET_INDEX = "_target_index";
+    private final String _TYPE = "_type";
+    private final Map<String, FilterMapper> filters = new ConcurrentHashMap<>();
     private final ESConfigValidator configValidator = new ESConfigValidator();
 
     public ElasticsearchConnector() {
@@ -108,17 +107,7 @@ public final class ElasticsearchConnector extends AbstractConnector implements C
 
     @Override
     public String getConnectorType() {
-        return TYPE;
-    }
-
-    @Override
-    public boolean isSupportedTiming() {
-        return true;
-    }
-
-    @Override
-    public boolean isSupportedLog() {
-        return false;
+        return "Elasticsearch";
     }
 
     @Override
@@ -243,7 +232,7 @@ public final class ElasticsearchConnector extends AbstractConnector implements C
         } else {
             builder.from((context.getPageIndex() - 1) * context.getPageSize());
         }
-        builder.size(context.getPageSize() > MAX_SIZE ? MAX_SIZE : context.getPageSize());
+        builder.size(Math.min(context.getPageSize(), 10000));
 
         try {
             SearchRequest rq = new SearchRequest(new String[]{context.getCommand().get(_SOURCE_INDEX)}, builder);
@@ -265,21 +254,21 @@ public final class ElasticsearchConnector extends AbstractConnector implements C
     }
 
     @Override
-    public Result writer(ESConnectorInstance connectorInstance, WriterBatchConfig config) {
-        List<Map> data = config.getData();
-        if (CollectionUtils.isEmpty(data) || CollectionUtils.isEmpty(config.getFields())) {
+    public Result writer(ESConnectorInstance connectorInstance, PluginContext context) {
+        List<Map> data = context.getTargetList();
+        if (CollectionUtils.isEmpty(data)) {
             logger.error("writer data can not be empty.");
             throw new ElasticsearchException("writer data can not be empty.");
         }
 
-        final Result result = new Result();
-        final List<Field> pkFields = PrimaryKeyUtil.findConfigPrimaryKeyFields(config);
+        Result result = new Result();
+        final List<Field> pkFields = PrimaryKeyUtil.findExistPrimaryKeyFields(context.getTargetFields());
         try {
             final BulkRequest request = new BulkRequest();
             final String pk = pkFields.get(0).getName();
-            final String indexName = config.getCommand().get(_TARGET_INDEX);
-            final String type = config.getCommand().get(_TYPE);
-            data.forEach(row -> addRequest(request, indexName, type, config.getEvent(), String.valueOf(row.get(pk)), row));
+            final String indexName = context.getCommand().get(_TARGET_INDEX);
+            final String type = context.getCommand().get(_TYPE);
+            data.forEach(row -> addRequest(request, indexName, type, context.getEvent(), String.valueOf(row.get(pk)), row));
 
             BulkResponse response = connectorInstance.getConnection().bulkWithVersion(request, RequestOptions.DEFAULT);
             RestStatus restStatus = response.status();

+ 11 - 19
dbsyncer-connector/dbsyncer-connector-file/src/main/java/org/dbsyncer/connector/file/FileConnector.java

@@ -14,7 +14,6 @@ import org.dbsyncer.connector.file.model.FileResolver;
 import org.dbsyncer.connector.file.model.FileSchema;
 import org.dbsyncer.connector.file.validator.FileConfigValidator;
 import org.dbsyncer.sdk.config.CommandConfig;
-import org.dbsyncer.sdk.config.WriterBatchConfig;
 import org.dbsyncer.sdk.connector.AbstractConnector;
 import org.dbsyncer.sdk.connector.ConfigValidator;
 import org.dbsyncer.sdk.connector.ConnectorInstance;
@@ -23,13 +22,18 @@ import org.dbsyncer.sdk.listener.Listener;
 import org.dbsyncer.sdk.model.Field;
 import org.dbsyncer.sdk.model.MetaInfo;
 import org.dbsyncer.sdk.model.Table;
+import org.dbsyncer.sdk.plugin.PluginContext;
 import org.dbsyncer.sdk.plugin.ReaderContext;
 import org.dbsyncer.sdk.spi.ConnectorService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.util.Assert;
 
-import java.io.*;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.OutputStream;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.sql.Types;
@@ -51,7 +55,6 @@ public final class FileConnector extends AbstractConnector implements ConnectorS
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
-    private final String TYPE = "File";
     private final String FILE_NAME = "fileName";
     private final String FILE_PATH = "filePath";
     private final FileResolver fileResolver = new FileResolver();
@@ -63,17 +66,7 @@ public final class FileConnector extends AbstractConnector implements ConnectorS
 
     @Override
     public String getConnectorType() {
-        return TYPE;
-    }
-
-    @Override
-    public boolean isSupportedTiming() {
-        return false;
-    }
-
-    @Override
-    public boolean isSupportedLog() {
-        return true;
+        return "File";
     }
 
     @Override
@@ -192,24 +185,23 @@ public final class FileConnector extends AbstractConnector implements ConnectorS
     }
 
     @Override
-    public Result writer(FileConnectorInstance connectorInstance, WriterBatchConfig config) {
-        List<Map> data = config.getData();
+    public Result writer(FileConnectorInstance connectorInstance, PluginContext context) {
+        List<Map> data = context.getTargetList();
         if (CollectionUtils.isEmpty(data)) {
             logger.error("writer data can not be empty.");
             throw new FileException("writer data can not be empty.");
         }
 
-        final List<Field> fields = config.getFields();
         final String separator = new String(new char[]{connectorInstance.getConfig().getSeparator()});
 
         Result result = new Result();
         OutputStream output = null;
         try {
-            final String filePath = connectorInstance.getFilePath(config.getCommand().get(FILE_NAME));
+            final String filePath = connectorInstance.getFilePath(context.getCommand().get(FILE_NAME));
             output = new FileOutputStream(filePath, true);
             List<String> lines = data.stream().map(row -> {
                 List<String> array = new ArrayList<>();
-                fields.forEach(field -> {
+                context.getTargetFields().forEach(field -> {
                     Object o = row.get(field.getName());
                     array.add(null != o ? String.valueOf(o) : "");
                 });

+ 4 - 4
dbsyncer-connector/dbsyncer-connector-file/src/main/java/org/dbsyncer/connector/file/model/FileResolver.java

@@ -9,9 +9,9 @@ import org.dbsyncer.connector.file.column.impl.FileColumnValue;
 import org.dbsyncer.sdk.model.Field;
 
 import java.util.ArrayList;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 /**
  * @Author AE86
@@ -20,11 +20,11 @@ import java.util.Map;
  */
 public class FileResolver {
 
-    private ColumnValue value = new FileColumnValue();
+    private final ColumnValue value = new FileColumnValue();
 
     public Map<String, Object> parseMap(List<Field> fields, char separator, String line) {
-        Map<String, Object> row = new LinkedHashMap<>();
-        parse(fields, separator, line, (key, value) -> row.put(key, value));
+        Map<String, Object> row = new ConcurrentHashMap<>();
+        parse(fields, separator, line, row::put);
         return row;
     }
 

+ 6 - 17
dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/KafkaConnector.java

@@ -10,7 +10,6 @@ import org.dbsyncer.common.util.JsonUtil;
 import org.dbsyncer.connector.kafka.config.KafkaConfig;
 import org.dbsyncer.connector.kafka.validator.KafkaConfigValidator;
 import org.dbsyncer.sdk.config.CommandConfig;
-import org.dbsyncer.sdk.config.WriterBatchConfig;
 import org.dbsyncer.sdk.connector.AbstractConnector;
 import org.dbsyncer.sdk.connector.ConfigValidator;
 import org.dbsyncer.sdk.connector.ConnectorInstance;
@@ -18,6 +17,7 @@ import org.dbsyncer.sdk.listener.Listener;
 import org.dbsyncer.sdk.model.Field;
 import org.dbsyncer.sdk.model.MetaInfo;
 import org.dbsyncer.sdk.model.Table;
+import org.dbsyncer.sdk.plugin.PluginContext;
 import org.dbsyncer.sdk.plugin.ReaderContext;
 import org.dbsyncer.sdk.spi.ConnectorService;
 import org.dbsyncer.sdk.util.PrimaryKeyUtil;
@@ -40,22 +40,11 @@ public class KafkaConnector extends AbstractConnector implements ConnectorServic
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
-    private final String TYPE = "Kafka";
     private final KafkaConfigValidator configValidator = new KafkaConfigValidator();
 
     @Override
     public String getConnectorType() {
-        return TYPE;
-    }
-
-    @Override
-    public boolean isSupportedTiming() {
-        return false;
-    }
-
-    @Override
-    public boolean isSupportedLog() {
-        return false;
+        return "Kafka";
     }
 
     @Override
@@ -113,16 +102,16 @@ public class KafkaConnector extends AbstractConnector implements ConnectorServic
     }
 
     @Override
-    public Result writer(KafkaConnectorInstance connectorInstance, WriterBatchConfig config) {
-        List<Map> data = config.getData();
-        if (CollectionUtils.isEmpty(data) || CollectionUtils.isEmpty(config.getFields())) {
+    public Result writer(KafkaConnectorInstance connectorInstance, PluginContext context) {
+        List<Map> data = context.getTargetList();
+        if (CollectionUtils.isEmpty(data)) {
             logger.error("writer data can not be empty.");
             throw new KafkaException("writer data can not be empty.");
         }
 
         Result result = new Result();
         final KafkaConfig cfg = connectorInstance.getConfig();
-        final List<Field> pkFields = PrimaryKeyUtil.findConfigPrimaryKeyFields(config);
+        final List<Field> pkFields = PrimaryKeyUtil.findExistPrimaryKeyFields(context.getTargetFields());
         try {
             String topic = cfg.getTopic();
             // 默认取第一个主键

+ 8 - 6
dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/serialization/JsonToMapDeserializer.java

@@ -8,8 +8,8 @@ import org.apache.kafka.common.serialization.Deserializer;
 import org.dbsyncer.common.util.JsonUtil;
 
 import java.io.UnsupportedEncodingException;
-import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 /**
  * @Author AE86
@@ -23,19 +23,21 @@ public class JsonToMapDeserializer implements Deserializer<Map> {
     public void configure(Map<String, ?> configs, boolean isKey) {
         String propertyName = isKey ? "key.deserializer.encoding" : "value.deserializer.encoding";
         Object encodingValue = configs.get(propertyName);
-        if (encodingValue == null)
+        if (encodingValue == null) {
             encodingValue = configs.get("deserializer.encoding");
-        if (encodingValue != null && encodingValue instanceof String)
+        }
+        if (encodingValue != null && encodingValue instanceof String) {
             encoding = (String) encodingValue;
+        }
     }
 
     @Override
     public Map deserialize(String topic, byte[] data) {
         try {
-            if (data == null)
+            if (data == null) {
                 return null;
-            else
-                return JsonUtil.jsonToObj(new String(data, encoding), LinkedHashMap.class);
+            }
+            return JsonUtil.jsonToObj(new String(data, encoding), ConcurrentHashMap.class);
         } catch (UnsupportedEncodingException e) {
             throw new SerializationException("Error when deserializing byte[] to string due to unsupported encoding " + encoding);
         }

+ 1 - 2
dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/DQLMySQLConnector.java

@@ -23,12 +23,11 @@ import org.dbsyncer.sdk.plugin.ReaderContext;
  */
 public final class DQLMySQLConnector extends AbstractDQLConnector {
 
-    private final String TYPE = "DqlMySQL";
     private final DqlMySQLConfigValidator configValidator = new DqlMySQLConfigValidator();
 
     @Override
     public String getConnectorType() {
-        return TYPE;
+        return "DqlMySQL";
     }
 
     @Override

+ 21 - 0
dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/MySQLConnector.java

@@ -6,16 +6,21 @@ package org.dbsyncer.connector.mysql;
 import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.connector.mysql.cdc.MySQLListener;
 import org.dbsyncer.connector.mysql.schema.MySQLDateValueMapper;
+import org.dbsyncer.connector.mysql.schema.MySQLSchemaResolver;
 import org.dbsyncer.connector.mysql.storage.MySQLStorageService;
 import org.dbsyncer.connector.mysql.validator.MySQLConfigValidator;
+import org.dbsyncer.sdk.config.CommandConfig;
 import org.dbsyncer.sdk.connector.ConfigValidator;
 import org.dbsyncer.sdk.connector.database.AbstractDatabaseConnector;
 import org.dbsyncer.sdk.constant.DatabaseConstant;
 import org.dbsyncer.sdk.enums.ListenerTypeEnum;
+import org.dbsyncer.sdk.enums.TableTypeEnum;
 import org.dbsyncer.sdk.listener.DatabaseQuartzListener;
 import org.dbsyncer.sdk.listener.Listener;
 import org.dbsyncer.sdk.model.PageSql;
+import org.dbsyncer.sdk.model.Table;
 import org.dbsyncer.sdk.plugin.ReaderContext;
+import org.dbsyncer.sdk.schema.SchemaResolver;
 import org.dbsyncer.sdk.storage.StorageService;
 import org.dbsyncer.sdk.util.PrimaryKeyUtil;
 import org.slf4j.Logger;
@@ -36,6 +41,7 @@ public final class MySQLConnector extends AbstractDatabaseConnector {
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
     private final MySQLConfigValidator configValidator = new MySQLConfigValidator();
+    private final MySQLSchemaResolver schemaResolver = new MySQLSchemaResolver();
 
     public MySQLConnector() {
         VALUE_MAPPERS.put(Types.DATE, new MySQLDateValueMapper());
@@ -135,9 +141,24 @@ public final class MySQLConnector extends AbstractDatabaseConnector {
         return newCursors;
     }
 
+    @Override
+    protected String getQueryCountSql(CommandConfig commandConfig, List<String> primaryKeys, String schema, String queryFilterSql) {
+        final Table table = commandConfig.getTable();
+        if (StringUtil.isNotBlank(queryFilterSql) || TableTypeEnum.isView(table.getType())) {
+            return super.getQueryCountSql(commandConfig, primaryKeys, schema, queryFilterSql);
+        }
+
+        // 从系统表查询
+        return String.format("SELECT TABLE_ROWS FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = '%s' LIMIT 1", table.getName());
+    }
+
     @Override
     public boolean enableCursor() {
         return true;
     }
 
+    @Override
+    public SchemaResolver getSchemaResolver() {
+        return schemaResolver;
+    }
 }

+ 2 - 2
dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/cdc/MySQLListener.java

@@ -347,9 +347,9 @@ public class MySQLListener extends AbstractDatabaseListener {
             if (StringUtil.isBlank(databaseName)) {
                 databaseName = data.getDatabase();
             }
+            databaseName = StringUtil.replace(databaseName, "`", "");
             if (isFilterTable(databaseName, tableName)) {
-                logger.info("sql:{}", data.getSql());
-                trySendEvent(new DDLChangedEvent(databaseName, tableName, ConnectorConstant.OPERTION_ALTER,
+                trySendEvent(new DDLChangedEvent(tableName, ConnectorConstant.OPERTION_ALTER,
                         data.getSql(), client.getBinlogFilename(), client.getBinlogPosition()));
             }
         }

+ 1 - 17
dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/schema/MySQLSchemaResolver.java

@@ -5,7 +5,6 @@ package org.dbsyncer.connector.mysql.schema;
 
 import org.dbsyncer.connector.mysql.MySQLException;
 import org.dbsyncer.connector.mysql.schema.support.*;
-import org.dbsyncer.sdk.model.Field;
 import org.dbsyncer.sdk.schema.AbstractSchemaResolver;
 import org.dbsyncer.sdk.schema.DataType;
 
@@ -22,14 +21,10 @@ import java.util.stream.Stream;
  */
 public final class MySQLSchemaResolver extends AbstractSchemaResolver {
 
-    private MySQLBytesType bytesType;
-
     @Override
     protected void initDataTypeMapping(Map<String, DataType> mapping) {
-        bytesType = new MySQLBytesType();
         Stream.of(
-                new MySQLBooleanType(),
-                bytesType,
+                new MySQLBytesType(),
                 new MySQLByteType(),
                 new MySQLDateType(),
                 new MySQLDecimalType(),
@@ -49,15 +44,4 @@ public final class MySQLSchemaResolver extends AbstractSchemaResolver {
         }));
     }
 
-    @Override
-    protected DataType getDataType(Map<String, DataType> mapping, Field field) {
-        DataType dataType = super.getDataType(mapping, field);
-        // bit(n > 1)
-        if (dataType instanceof MySQLBooleanType) {
-            if (field.getColumnSize() > 1) {
-                return bytesType;
-            }
-        }
-        return dataType;
-    }
 }

+ 0 - 51
dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/schema/support/MySQLBooleanType.java

@@ -1,51 +0,0 @@
-/**
- * DBSyncer Copyright 2020-2024 All Rights Reserved.
- */
-package org.dbsyncer.connector.mysql.schema.support;
-
-import org.dbsyncer.sdk.model.Field;
-import org.dbsyncer.sdk.schema.support.BooleanType;
-
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.Set;
-import java.util.stream.Collectors;
-
-/**
- * @Author 穿云
- * @Version 1.0.0
- * @Date 2024-11-26 22:59
- */
-public final class MySQLBooleanType extends BooleanType {
-
-    private enum TypeEnum {
-        BIT
-    }
-
-    @Override
-    public Set<String> getSupportedTypeName() {
-        return Arrays.stream(TypeEnum.values()).map(Enum::name).collect(Collectors.toSet());
-    }
-
-    @Override
-    protected Boolean merge(Object val, Field field) {
-        if (val instanceof Number) {
-            return ((Number) val).shortValue() == 1;
-        }
-        if (val instanceof BitSet) {
-            BitSet bitSet = (BitSet) val;
-            return bitSet.get(0);
-        }
-        return throwUnsupportedException(val, field);
-    }
-
-    @Override
-    protected Object convert(Object val, Field field) {
-        if (val instanceof Boolean) {
-            Boolean b = (Boolean) val;
-            return (short) (b ? 1 : 0);
-        }
-        return throwUnsupportedException(val, field);
-    }
-
-}

+ 20 - 0
dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/schema/support/MySQLByteType.java

@@ -7,6 +7,7 @@ import org.dbsyncer.sdk.model.Field;
 import org.dbsyncer.sdk.schema.support.ByteType;
 
 import java.util.Arrays;
+import java.util.BitSet;
 import java.util.Set;
 import java.util.stream.Collectors;
 
@@ -18,6 +19,7 @@ import java.util.stream.Collectors;
 public final class MySQLByteType extends ByteType {
 
     private enum TypeEnum {
+        BIT,
         TINYINT
     }
 
@@ -31,6 +33,24 @@ public final class MySQLByteType extends ByteType {
         if (val instanceof Number) {
             return ((Number) val).byteValue();
         }
+        if (val instanceof BitSet) {
+            BitSet bitSet = (BitSet) val;
+            byte[] bytes = bitSet.toByteArray();
+            if (bytes.length > 0) {
+                return bytes[0];
+            }
+            return 0;
+        }
+        if (val instanceof Boolean) {
+            Boolean b = (Boolean) val;
+            return (byte) (b ? 1 : 0);
+        }
+        if (val instanceof byte[]) {
+            byte[] bytes = (byte[]) val;
+            if (bytes.length > 0) {
+                return bytes[0];
+            }
+        }
         return throwUnsupportedException(val, field);
     }
 

+ 19 - 0
dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/schema/support/MySQLShortType.java

@@ -7,6 +7,7 @@ import org.dbsyncer.sdk.model.Field;
 import org.dbsyncer.sdk.schema.support.ShortType;
 
 import java.util.Arrays;
+import java.util.BitSet;
 import java.util.Set;
 import java.util.stream.Collectors;
 
@@ -42,6 +43,24 @@ public final class MySQLShortType extends ShortType {
         if (val instanceof Number) {
             return ((Number) val).shortValue();
         }
+//        if (val instanceof BitSet) {
+//            BitSet bitSet = (BitSet) val;
+//            byte[] bytes = bitSet.toByteArray();
+//            if (bytes.length > 0) {
+//                return (short) bytes[0];
+//            }
+//            return 0;
+//        }
+//        if (val instanceof Boolean) {
+//            Boolean b = (Boolean) val;
+//            return (short) (b ? 1 : 0);
+//        }
+//        if (val instanceof byte[]) {
+//            byte[] bytes = (byte[]) val;
+//            if (bytes.length > 1) {
+//                return (short) bytes[1];
+//            }
+//        }
         return throwUnsupportedException(val, field);
     }
 

+ 1 - 1
dbsyncer-connector/dbsyncer-connector-mysql/src/main/java/org/dbsyncer/connector/mysql/storage/MySQLStorageService.java

@@ -68,7 +68,7 @@ public class MySQLStorageService extends AbstractStorageService {
     public void init(Properties properties) {
         DatabaseConfig config = new DatabaseConfig();
         config.setConnectorType(properties.getProperty("dbsyncer.storage.type"));
-        config.setUrl(properties.getProperty("dbsyncer.storage.mysql.url", "jdbc:mysql://127.0.0.1:3306/dbsyncer?rewriteBatchedStatements=true&seUnicode=true&characterEncoding=UTF8&serverTimezone=Asia/Shanghai&useSSL=false&verifyServerCertificate=false&autoReconnect=true"));
+        config.setUrl(properties.getProperty("dbsyncer.storage.mysql.url", "jdbc:mysql://127.0.0.1:3306/dbsyncer?rewriteBatchedStatements=true&useUnicode=true&characterEncoding=UTF8&serverTimezone=Asia/Shanghai&useSSL=false&verifyServerCertificate=false&autoReconnect=true&tinyInt1isBit=false"));
         config.setUsername(properties.getProperty("dbsyncer.storage.mysql.username", "admin"));
         config.setPassword(properties.getProperty("dbsyncer.storage.mysql.password", "admin"));
         config.setDriverClassName(properties.getProperty("dbsyncer.storage.mysql.driver-class-name"));

+ 1 - 1
dbsyncer-connector/dbsyncer-connector-mysql/src/main/resources/public/connector/addDqlMySQL.html

@@ -18,7 +18,7 @@
         <div class="col-sm-10">
             <textarea name="url" class="form-control dbsyncer_textarea_resize_none" maxlength="1024"
                       dbsyncer-valid="require" rows="5"
-                      th:text="${connector?.config?.url}?:'jdbc:mysql://127.0.0.1:3306/test?rewriteBatchedStatements=true&amp;useUnicode=true&amp;characterEncoding=UTF8&amp;serverTimezone=Asia/Shanghai&amp;useSSL=false&amp;verifyServerCertificate=false&amp;autoReconnect=true&amp;failOverReadOnly=false'"></textarea>
+                      th:text="${connector?.config?.url}?:'jdbc:mysql://127.0.0.1:3306/test?rewriteBatchedStatements=true&amp;useUnicode=true&amp;characterEncoding=UTF8&amp;serverTimezone=Asia/Shanghai&amp;useSSL=false&amp;verifyServerCertificate=false&amp;autoReconnect=true&amp;failOverReadOnly=false&amp;tinyInt1isBit=false'"></textarea>
         </div>
     </div>
     <div th:replace="connector/addDataBaseProperties :: content"></div>

+ 1 - 1
dbsyncer-connector/dbsyncer-connector-mysql/src/main/resources/public/connector/addMySQL.html

@@ -18,7 +18,7 @@
         <div class="col-sm-10">
             <textarea name="url" class="form-control dbsyncer_textarea_resize_none" maxlength="1024"
                       dbsyncer-valid="require" rows="5"
-                      th:text="${connector?.config?.url} ?: 'jdbc:mysql://127.0.0.1:3306/test?rewriteBatchedStatements=true&amp;useUnicode=true&amp;characterEncoding=UTF8&amp;serverTimezone=Asia/Shanghai&amp;useSSL=false&amp;verifyServerCertificate=false&amp;autoReconnect=true&amp;failOverReadOnly=false'"></textarea>
+                      th:text="${connector?.config?.url} ?: 'jdbc:mysql://127.0.0.1:3306/test?rewriteBatchedStatements=true&amp;useUnicode=true&amp;characterEncoding=UTF8&amp;serverTimezone=Asia/Shanghai&amp;useSSL=false&amp;verifyServerCertificate=false&amp;autoReconnect=true&amp;failOverReadOnly=false&amp;tinyInt1isBit=false'"></textarea>
         </div>
     </div>
     <div th:replace="connector/addDataBaseProperties :: content"></div>

+ 1 - 2
dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/DQLOracleConnector.java

@@ -23,12 +23,11 @@ import org.dbsyncer.sdk.plugin.ReaderContext;
  */
 public final class DQLOracleConnector extends AbstractDQLConnector {
 
-    private final String TYPE = "DqlOracle";
     private final DqlOracleConfigValidator configValidator = new DqlOracleConfigValidator();
 
     @Override
     public String getConnectorType() {
-        return TYPE;
+        return "DqlOracle";
     }
 
     @Override

+ 1 - 2
dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/OracleConnector.java

@@ -38,7 +38,6 @@ public final class OracleConnector extends AbstractDatabaseConnector {
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
-    private final String TYPE = "Oracle";
     private final OracleConfigValidator configValidator = new OracleConfigValidator();
 
     public OracleConnector() {
@@ -48,7 +47,7 @@ public final class OracleConnector extends AbstractDatabaseConnector {
 
     @Override
     public String getConnectorType() {
-        return TYPE;
+        return "Oracle";
     }
 
     @Override

+ 4 - 10
dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/cdc/OracleListener.java

@@ -30,7 +30,6 @@ import org.dbsyncer.sdk.model.Field;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.sql.SQLException;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
@@ -56,7 +55,6 @@ public class OracleListener extends AbstractDatabaseListener {
     @Override
     public void start() {
         try {
-            // TODO [increment-worker-1184659161326161921] 这里应该单独启动一个线程,线程名要有一定意义,如:binlog-parser-127.0.0.1:3306_123,便于监控排查问题
             final DatabaseConfig config = getConnectorInstance().getConfig();
             String driverClassName = config.getDriverClassName();
             String username = config.getUsername();
@@ -77,7 +75,7 @@ public class OracleListener extends AbstractDatabaseListener {
             });
             logMiner.start();
         } catch (Exception e) {
-            logger.error("启动失败:{}", e.getMessage());
+            logger.error("启动失败:{}", e.getMessage(), e);
             throw new OracleException(e);
         }
     }
@@ -144,19 +142,15 @@ public class OracleListener extends AbstractDatabaseListener {
             String tableName = getTableName(alter.getTable());
             if (tableFiledMap.containsKey(tableName)) {
                 logger.info("sql:{}", event.getRedoSql());
-                trySendEvent(new DDLChangedEvent(null, tableName, ConnectorConstant.OPERTION_ALTER, event.getRedoSql(), null, event.getScn()));
+                trySendEvent(new DDLChangedEvent(tableName, ConnectorConstant.OPERTION_ALTER, event.getRedoSql(), null, event.getScn()));
             }
         }
     }
 
     @Override
     public void close() {
-        try {
-            if (logMiner != null) {
-                logMiner.close();
-            }
-        } catch (SQLException e) {
-            logger.error(e.getMessage(), e);
+        if (logMiner != null) {
+            logMiner.close();
         }
     }
 

+ 0 - 383
dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/dcn/DBChangeNotification.java

@@ -1,383 +0,0 @@
-/**
- * DBSyncer Copyright 2020-2023 All Rights Reserved.
- */
-package org.dbsyncer.connector.oracle.dcn;
-
-import oracle.jdbc.OracleDriver;
-import oracle.jdbc.OracleStatement;
-import oracle.jdbc.dcn.DatabaseChangeEvent;
-import oracle.jdbc.dcn.DatabaseChangeListener;
-import oracle.jdbc.dcn.DatabaseChangeRegistration;
-import oracle.jdbc.dcn.RowChangeDescription;
-import oracle.jdbc.dcn.TableChangeDescription;
-import oracle.jdbc.driver.OracleConnection;
-import org.dbsyncer.common.util.StringUtil;
-import org.dbsyncer.connector.oracle.OracleException;
-import org.dbsyncer.connector.oracle.model.DCNEvent;
-import org.dbsyncer.sdk.constant.ConnectorConstant;
-import org.dbsyncer.sdk.listener.event.RowChangedEvent;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.util.Assert;
-
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
-
-/**
- * 授予登录账号监听事件权限
- * <p>sqlplus/as sysdba
- * <p>
- * <p>grant change notification to AE86
- *
- * @Author AE86
- * @Version 1.0.0
- * @Date 2022-06-08 21:53
- */
-public class DBChangeNotification {
-
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
-    private static final String QUERY_ROW_DATA_SQL = "SELECT * FROM \"%s\" WHERE ROWID='%s'";
-    private static final String QUERY_TABLE_ALL_SQL = "SELECT TABLE_NAME FROM USER_TAB_COMMENTS WHERE TABLE_TYPE='TABLE'";
-    private static final String QUERY_TABLE_ID_SQL = "SELECT OBJECT_ID FROM ALL_OBJECTS WHERE OBJECT_TYPE='TABLE' AND OBJECT_NAME='%s' AND OWNER='%s'";
-    private static final String QUERY_TABLE_SQL = "SELECT 1 FROM \"%s\" WHERE 1=2";
-    private static final String QUERY_CALLBACK_SQL = "SELECT REGID,CALLBACK FROM USER_CHANGE_NOTIFICATION_REGS";
-    private static final String CALLBACK = "net8://(ADDRESS=(PROTOCOL=tcp)(HOST=%s)(PORT=%s))?PR=0";
-
-    private String username;
-    private String password;
-    private String url;
-    private OracleConnection conn;
-    private DatabaseChangeRegistration dcr;
-    private Map<Integer, String> tables;
-    private Set<String> filterTable;
-    private List<RowEventListener> listeners = new ArrayList<>();
-    private final Lock connectLock = new ReentrantLock();
-    private volatile boolean connected;
-
-    public DBChangeNotification(String username, String password, String url) {
-        this.username = username;
-        this.password = password;
-        this.url = url;
-    }
-
-    public void addRowEventListener(RowEventListener rowEventListener) {
-        this.listeners.add(rowEventListener);
-    }
-
-    public void start() throws SQLException {
-        try {
-            connectLock.lock();
-            if (connected) {
-                logger.error("DBChangeNotification is already started");
-                return;
-            }
-            conn = connect();
-            connected = true;
-            OracleStatement statement = (OracleStatement) conn.createStatement();
-            readTables(statement);
-
-            Properties prop = new Properties();
-            prop.setProperty(OracleConnection.DCN_NOTIFY_ROWIDS, "true");
-            prop.setProperty(OracleConnection.DCN_IGNORE_UPDATEOP, "false");
-            prop.setProperty(OracleConnection.DCN_IGNORE_INSERTOP, "false");
-            prop.setProperty(OracleConnection.DCN_IGNORE_DELETEOP, "false");
-
-            // add the listener:NTFDCNRegistration
-            dcr = conn.registerDatabaseChangeNotification(prop);
-            dcr.addListener(new DCNListener());
-
-            final long regId = dcr.getRegId();
-            final String host = getHost(dcr);
-            final int port = getPort(dcr);
-            final String callback = String.format(CALLBACK, host, port);
-            logger.info("regId:{}, callback:{}", regId, callback);
-            // clean the registrations
-            clean(statement, regId, callback);
-            statement.setDatabaseChangeRegistration(dcr);
-
-            // 配置监听表
-            for (Map.Entry<Integer, String> m : tables.entrySet()) {
-                String sql = String.format(QUERY_TABLE_SQL, m.getValue());
-                try {
-                    statement.executeQuery(sql);
-                } catch (SQLException e) {
-                    logger.debug("配置监听表异常:{}, {}", sql, e.getMessage());
-                }
-            }
-            close(statement);
-        } catch (SQLException ex) {
-            // if an exception occurs, we need to close the registration in order
-            // to interrupt the thread otherwise it will be hanging around.
-            close();
-            throw ex;
-        } finally {
-            connectLock.unlock();
-        }
-    }
-
-    public OracleConnection getOracleConnection() {
-        return conn;
-    }
-
-    public boolean isConnected() {
-        return connected;
-    }
-
-    public void setFilterTable(Set<String> filterTable) {
-        this.filterTable = filterTable;
-    }
-
-    public void close() {
-        connected = false;
-        try {
-            if (null != conn) {
-                conn.unregisterDatabaseChangeNotification(dcr);
-            }
-            close(conn);
-        } catch (SQLException e) {
-            logger.error(e.getMessage());
-        }
-    }
-
-    public void close(AutoCloseable rs) {
-        if (null != rs) {
-            try {
-                rs.close();
-            } catch (Exception e) {
-                logger.error(e.getMessage());
-            }
-        }
-    }
-
-    public void read(String tableName, String rowId, List<Object> data) {
-        OracleStatement os = null;
-        ResultSet rs = null;
-        try {
-            os = createStatement();
-            rs = os.executeQuery(String.format(QUERY_ROW_DATA_SQL, tableName, rowId));
-            if (rs.next()) {
-                final int size = rs.getMetaData().getColumnCount();
-                do {
-                    data.add(rowId);
-                    for (int i = 1; i <= size; i++) {
-                        data.add(rs.getObject(i));
-                    }
-                } while (rs.next());
-            }
-        } catch (SQLException e) {
-            logger.error(e.getMessage());
-        } finally {
-            close(rs);
-            close(os);
-        }
-    }
-
-    private OracleStatement createStatement() throws SQLException {
-        try {
-            OracleStatement statement = (OracleStatement) conn.createStatement();
-            Assert.notNull(statement, "Can't create statement, trying to reconnect.");
-            return statement;
-        } catch (Exception e) {
-            connected = false;
-            logger.error(e.getMessage());
-        }
-        conn = connect();
-        connected = true;
-        logger.info("重连成功");
-        return (OracleStatement) conn.createStatement();
-    }
-
-    private void readTables(OracleStatement statement) {
-        tables = new LinkedHashMap<>();
-        List<String> tableList = queryForList(statement, QUERY_TABLE_ALL_SQL, rs -> rs.getString(1));
-        Assert.notEmpty(tableList, "No tables available");
-        final String owner = username.toUpperCase();
-        tableList.forEach(tableName -> tables.put(queryForObject(statement, String.format(QUERY_TABLE_ID_SQL, tableName, owner), rs -> rs.getInt(1)), tableName));
-    }
-
-    private <T> List<T> queryForList(OracleStatement statement, String sql, ResultSetMapper<T> mapper) {
-        ResultSet rs = null;
-        List<T> list = new ArrayList<>();
-        try {
-            rs = statement.executeQuery(sql);
-            while (rs.next()) {
-                list.add(mapper.apply(rs));
-            }
-        } catch (SQLException e) {
-            logger.error(e.getMessage());
-        } finally {
-            close(rs);
-        }
-        return list;
-    }
-
-    private <T> T queryForObject(OracleStatement statement, String sql, ResultSetMapper<T> mapper) {
-        ResultSet rs = null;
-        T apply = null;
-        try {
-            rs = statement.executeQuery(sql);
-            while (rs.next()) {
-                apply = mapper.apply(rs);
-                break;
-            }
-        } catch (SQLException e) {
-            logger.error(e.getMessage());
-        } finally {
-            close(rs);
-        }
-        return apply;
-    }
-
-    private Object invokeDCR(DatabaseChangeRegistration dcr, String declaredMethod) {
-        try {
-            Class clazz = dcr.getClass().getSuperclass();
-            Method method = clazz.getDeclaredMethod(declaredMethod);
-            method.setAccessible(true);
-            return method.invoke(dcr, new Object[]{});
-        } catch (NoSuchMethodException e) {
-            logger.error(e.getMessage());
-        } catch (IllegalAccessException e) {
-            logger.error(e.getMessage());
-        } catch (InvocationTargetException e) {
-            logger.error(e.getMessage());
-        }
-        throw new OracleException(String.format("Can't invoke '%s'.", declaredMethod));
-    }
-
-    /**
-     * ServiceName: jdbc:oracle:thin:@//host:port/serviceName
-     * TNS: jdbc:oracle:thin:@(description=(address=(protocol=tcp)(port=1521)(host=127.0.0.1))(connect_data=(service_name=orcl)))
-     * SID: jdbc:oracle:thin:@host:port:sid
-     *
-     * @param dcr
-     * @return
-     */
-    private String getHost(DatabaseChangeRegistration dcr) {
-        if (StringUtil.isBlank(url)) {
-            throw new IllegalArgumentException("url is null");
-        }
-
-        // TNS
-        if (StringUtil.startsWith(url, "jdbc:oracle:thin:@(")) {
-            Object obj = invokeDCR(dcr, "getClientHost");
-            return String.valueOf(obj);
-        }
-
-        // SID
-        String host = url.substring(url.indexOf("@") + 1);
-        host = host.substring(0, host.indexOf(":"));
-        return host;
-    }
-
-    private int getPort(DatabaseChangeRegistration dcr) {
-        Object obj = invokeDCR(dcr, "getClientTCPPort");
-        return Integer.parseInt(String.valueOf(obj));
-    }
-
-    private void clean(OracleStatement statement, long excludeRegId, String excludeCallback) {
-        ResultSet rs = null;
-        try {
-            rs = statement.executeQuery(QUERY_CALLBACK_SQL);
-            while (rs.next()) {
-                long regId = rs.getLong(1);
-                String callback = rs.getString(2);
-
-                if (regId != excludeRegId && callback.equals(excludeCallback)) {
-                    logger.info("Clean regid:{}, callback:{}", regId, callback);
-                    conn.unregisterDatabaseChangeNotification(regId, callback);
-                }
-            }
-        } catch (SQLException e) {
-            logger.error(e.getMessage());
-        } finally {
-            close(rs);
-        }
-    }
-
-    private OracleConnection connect() throws SQLException {
-        OracleDriver dr = new OracleDriver();
-        Properties prop = new Properties();
-        prop.setProperty(OracleConnection.CONNECTION_PROPERTY_USER_NAME, username);
-        prop.setProperty(OracleConnection.CONNECTION_PROPERTY_PASSWORD, password);
-        return (OracleConnection) dr.connect(url, prop);
-    }
-
-    private interface ResultSetMapper<T> {
-        T apply(ResultSet rs) throws SQLException;
-    }
-
-    private void parseEvent(DCNEvent event) {
-        List<Object> data = new ArrayList<>();
-        if (event.getCode() == TableChangeDescription.TableOperation.UPDATE.getCode()) {
-            read(event.getTableName(), event.getRowId(), data);
-            listeners.forEach(listener -> listener.onEvents(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_UPDATE, data)));
-            return;
-        }
-
-        if (event.getCode() == TableChangeDescription.TableOperation.INSERT.getCode()) {
-            read(event.getTableName(), event.getRowId(), data);
-            listeners.forEach(listener -> listener.onEvents(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_INSERT, data)));
-            return;
-        }
-
-        data.add(event.getRowId());
-        listeners.forEach(listener -> listener.onEvents(new RowChangedEvent(event.getTableName(), ConnectorConstant.OPERTION_DELETE, data)));
-    }
-
-    final class DCNListener implements DatabaseChangeListener {
-
-        @Override
-        public void onDatabaseChangeNotification(DatabaseChangeEvent event) {
-            // 只接收当前注册的监听
-            if (dcr.getRegId() != event.getRegId()) {
-                return;
-            }
-            DatabaseChangeEvent.EventType eventType = event.getEventType();
-            if (eventType == DatabaseChangeEvent.EventType.OBJCHANGE) {
-                for (TableChangeDescription td : event.getTableChangeDescription()) {
-                    RowChangeDescription[] rds = td.getRowChangeDescription();
-                    for (RowChangeDescription rd : rds) {
-                        String tableName = tables.get(td.getObjectNumber());
-                        if (!filterTable.contains(tableName)) {
-                            logger.info("Table[{}] {}", tableName, rd.getRowOperation().name());
-                            continue;
-                        }
-                        parseEvent(new DCNEvent(tableName, rd.getRowid().stringValue(), rd.getRowOperation().getCode()));
-                    }
-                }
-                return;
-            }
-
-            // 断线
-            if (eventType == DatabaseChangeEvent.EventType.SHUTDOWN) {
-                connected = false;
-                logger.error("连接中断,等待Oracle数据库重启中...");
-                return;
-            }
-
-            // 重启
-            if (eventType == DatabaseChangeEvent.EventType.STARTUP) {
-                try {
-                    conn = connect();
-                    connected = true;
-                    logger.info("重连成功");
-                } catch (SQLException e) {
-                    logger.error("重连异常", e);
-                }
-            }
-        }
-
-    }
-
-}

+ 0 - 19
dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/dcn/RowEventListener.java

@@ -1,19 +0,0 @@
-/**
- * DBSyncer Copyright 2020-2023 All Rights Reserved.
- */
-package org.dbsyncer.connector.oracle.dcn;
-
-import org.dbsyncer.sdk.listener.event.RowChangedEvent;
-
-/**
- * 行变更监听器
- *
- * @Author AE86
- * @Version 1.0.0
- * @Date 2022-06-08 21:53
- */
-public interface RowEventListener {
-
-    void onEvents(RowChangedEvent rowChangedEvent);
-
-}

+ 115 - 117
dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/logminer/LogMiner.java

@@ -9,12 +9,16 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.math.BigInteger;
-import java.sql.*;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Timestamp;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
-import java.util.stream.Collectors;
 
 /**
  * @Author AE86
@@ -30,7 +34,8 @@ public class LogMiner {
     private final String url;
     private final String schema;
     private final String driverClassName;
-    private final String miningStrategy = "DBMS_LOGMNR.DICT_FROM_ONLINE_CATALOG";
+    private final int queryTimeout = 300;
+    private final int fetchSize = 1000;
     private volatile boolean connected = false;
     private Connection connection;
     private List<BigInteger> currentRedoLogSequences;
@@ -50,15 +55,20 @@ public class LogMiner {
         this.driverClassName = driverClassName;
     }
 
-    public void close() throws SQLException {
+    public void close() {
         connected = false;
+        closeQuietly();
+    }
+
+    private void closeQuietly() {
+        if (isValid()) {
+            LogMinerHelper.endLogMiner(connection);
+        }
         if (null != worker && !worker.isInterrupted()) {
             worker.interrupt();
             worker = null;
         }
-        if (connection != null) {
-            connection.close();
-        }
+        close(connection);
     }
 
     public void start() throws SQLException {
@@ -80,20 +90,45 @@ public class LogMiner {
         }
     }
 
+    private Connection createConnection() throws SQLException {
+        return DatabaseUtil.getConnection(driverClassName, url, username, password);
+    }
+
+    private Connection validateConnection() throws SQLException {
+        Connection conn = null;
+        try {
+            conn = DatabaseUtil.getConnection(driverClassName, url, username, password);
+            LogMinerHelper.setSessionParameter(conn);
+            int version = conn.getMetaData().getDatabaseMajorVersion();
+            // 19支持cdb模式
+            if (version == 19) {
+                LogMinerHelper.setSessionContainerIfCdbMode(conn);
+            }
+            // 低于10不支持
+            else if (version < 10) {
+                throw new IllegalArgumentException(String.format("Unsupported database version: %d(current) < 10", version));
+            }
+            // 检查账号权限
+            LogMinerHelper.checkPermissions(conn, version);
+        } catch (Exception e) {
+            close(conn);
+            throw e;
+        }
+        return conn;
+    }
+
     private void connect() throws SQLException {
-        this.connection = DatabaseUtil.getConnection(driverClassName, url, username, password);
+        this.connection = validateConnection();
         // 判断是否第一次读取
         if (startScn == 0) {
-            startScn = getCurrentScn(connection);
+            startScn = LogMinerHelper.getCurrentScn(connection);
+            restartLogMiner(startScn);
+        } else {
+            restartLogMiner(LogMinerHelper.getCurrentScn(connection));
         }
-        logger.info("start LogMiner, scn={}", startScn);
-        LogMinerHelper.setSessionParameter(connection);
-        // 1.记录当前redoLog,用于下文判断redoLog 是否切换
-        currentRedoLogSequences = LogMinerHelper.getCurrentRedoLogSequences(connection);
-        // 2.构建数据字典 && add redo / archived log
-        initializeLogMiner();
+        logger.info("Start log miner, scn={}", startScn);
         worker = new Worker();
-        worker.setName(new StringBuilder("log-miner-parser-").append(url).append("_").append(worker.hashCode()).toString());
+        worker.setName("log-miner-parser-" + url + "_" + worker.hashCode());
         worker.setDaemon(false);
         worker.start();
     }
@@ -102,18 +137,12 @@ public class LogMiner {
         logger.error("Connection interrupted, attempting to reconnect");
         while (connected) {
             try {
-                if (null != worker && !worker.isInterrupted()) {
-                    worker.interrupt();
-                    worker = null;
-                }
-                if (connection != null) {
-                    connection.close();
-                }
+                closeQuietly();
                 connect();
                 logger.info("Reconnect successfully");
                 break;
             } catch (Exception e) {
-                logger.error(url, e);
+                logger.error("Reconnect failed", e);
                 sleepSeconds(5);
             }
         }
@@ -129,21 +158,9 @@ public class LogMiner {
         }
     }
 
-    public long getCurrentScn(Connection connection) throws SQLException {
-        try (Statement statement = connection.createStatement()) {
-            ResultSet rs = statement.executeQuery("select CURRENT_SCN from V$DATABASE");
-
-            if (!rs.next()) {
-                throw new IllegalStateException("Couldn't get SCN");
-            }
-
-            return rs.getLong(1);
-        }
-    }
-
-    private void restartLogMiner() throws SQLException {
-        LogMinerHelper.endLogMiner(connection);
-        initializeLogMiner();
+    private void restartLogMiner(long endScn) throws SQLException {
+        LogMinerHelper.startLogMiner(connection, startScn, endScn);
+        currentRedoLogSequences = LogMinerHelper.getCurrentRedoLogSequences(connection);
     }
 
     private boolean redoLogSwitchOccurred() throws SQLException {
@@ -155,41 +172,6 @@ public class LogMiner {
         return false;
     }
 
-    private BigInteger determineEndScn() throws SQLException {
-        return BigInteger.valueOf(getCurrentScn(connection));
-    }
-
-    private void initializeLogMiner() throws SQLException {
-        // 默认使用在线数据字典,所以此处不做数据字典相关操作
-        LogMinerHelper.buildDataDictionary(connection, miningStrategy);
-
-        setRedoLog();
-    }
-
-    private void setRedoLog() throws SQLException {
-        LogMinerHelper.removeLogFilesFromMining(connection);
-        List<LogFile> onlineLogFiles = LogMinerHelper.getOnlineLogFilesForOffsetScn(connection, BigInteger.valueOf(startScn));
-        List<LogFile> archivedLogFiles = LogMinerHelper.getArchivedLogFilesForOffsetScn(connection, BigInteger.valueOf(startScn));
-        List<String> logFilesNames = archivedLogFiles.stream().map(LogFile::getFileName).collect(Collectors.toList());
-        for (LogFile onlineLogFile : onlineLogFiles) {
-            boolean found = false;
-            for (LogFile archivedLogFile : archivedLogFiles) {
-                if (onlineLogFile.isSameRange(archivedLogFile)) {
-                    // 如果redo 已经被归档,那么就不需要加载这个redo了
-                    found = true;
-                    break;
-                }
-            }
-            if (!found)
-                logFilesNames.add(onlineLogFile.getFileName());
-        }
-
-        // 加载所需要的redo / archived
-        for (String fileName : logFilesNames) {
-            LogMinerHelper.addLogFile(connection, fileName);
-        }
-    }
-
     private void logMinerViewProcessor(ResultSet rs) throws SQLException {
         while (rs.next()) {
             BigInteger scn = rs.getBigDecimal("SCN").toBigInteger();
@@ -198,12 +180,6 @@ public class LogMiner {
             int operationCode = rs.getInt("OPERATION_CODE");
             Timestamp changeTime = rs.getTimestamp("TIMESTAMP");
             String txId = rs.getString("XID");
-            String operation = rs.getString("OPERATION");
-            String username = rs.getString("USERNAME");
-
-            logger.trace("Capture record, SCN:{}, TABLE_NAME:{}, SEG_OWNER:{}, OPERATION_CODE:{}, TIMESTAMP:{}, XID:{}, OPERATION:{}, USERNAME:{}",
-                    scn, tableName, segOwner, operationCode, changeTime, txId, operation, username);
-
             // Commit
             if (operationCode == LogMinerHelper.LOG_MINER_OC_COMMIT) {
                 // 将TransactionalBuffer中当前事务的DML 转移到消费者处理
@@ -309,61 +285,83 @@ public class LogMiner {
         return connected;
     }
 
+    // 判断连接是否正常
+    private boolean isValid() {
+        try {
+            return connection != null && connection.isValid(queryTimeout);
+        } catch (SQLException e) {
+            return false;
+        }
+    }
+
+    private void close(AutoCloseable closeable) {
+        if (null != closeable) {
+            try {
+                closeable.close();
+            } catch (Exception e) {
+                logger.error(e.getMessage());
+            }
+        }
+    }
+
+    /** 关闭数据库连接资源 */
+    private void closeResources(ResultSet rs, Statement stmt) {
+        close(rs);
+        close(stmt);
+    }
+
     final class Worker extends Thread {
 
         @Override
         public void run() {
             String minerViewQuery = LogMinerHelper.logMinerViewQuery(schema, username);
-            try (PreparedStatement minerViewStatement = connection.prepareStatement(minerViewQuery, ResultSet.TYPE_FORWARD_ONLY,
-                    ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT)) {
+            PreparedStatement statement = null;
+            ResultSet rs = null;
+            try {
                 while (!isInterrupted() && connected) {
+                    if (!isValid()) {
+                        connection = createConnection();
+                    }
+                    closeResources(rs, statement);
                     // 1.确定 endScn
-                    BigInteger endScn = determineEndScn();
+                    long endScn = LogMinerHelper.getCurrentScn(connection);
 
                     // 2.是否发生redoLog切换
                     if (redoLogSwitchOccurred()) {
-                        // 如果切换则重启logMiner会话
-                        restartLogMiner();
-                        currentRedoLogSequences = LogMinerHelper.getCurrentRedoLogSequences(connection);
+                        logger.info("Switch to new redo log");
+                        restartLogMiner(endScn);
                     }
 
-                    // 3.start logMiner
-                    LogMinerHelper.startLogMiner(connection, BigInteger.valueOf(startScn), endScn, miningStrategy);
-
-                    // 4.查询 logMiner view, 处理结果集
-                    minerViewStatement.setFetchSize(2000);
-                    minerViewStatement.setFetchDirection(ResultSet.FETCH_FORWARD);
-                    minerViewStatement.setString(1, String.valueOf(startScn));
-                    minerViewStatement.setString(2, endScn.toString());
-                    try (ResultSet rs = minerViewStatement.executeQuery()) {
-                        try {
-                            logMinerViewProcessor(rs);
-                        } catch (SQLException e) {
-                            if (e.getMessage().contains("ORA-00310")) {
-                                logger.error("ORA-00310 try continue");
-                                restartLogMiner();
-                                currentRedoLogSequences = LogMinerHelper.getCurrentRedoLogSequences(connection);
-                                continue;
-                            }
-                            throw e;
+                    // 3.查询 logMiner view, 处理结果集
+                    statement = connection.prepareStatement(minerViewQuery, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT);
+                    statement.setFetchSize(fetchSize);
+                    statement.setFetchDirection(ResultSet.FETCH_FORWARD);
+                    statement.setQueryTimeout(queryTimeout);
+                    statement.setString(1, String.valueOf(startScn));
+                    statement.setString(2, String.valueOf(endScn));
+                    try {
+                        rs = statement.executeQuery();
+                        logMinerViewProcessor(rs);
+                    } catch (SQLException e) {
+                        if (e.getMessage().contains("ORA-00310")) {
+                            logger.info("ORA-00310 restart log miner");
+                            LogMinerHelper.endLogMiner(connection);
+                            restartLogMiner(endScn);
+                            continue;
                         }
+                        throw e;
                     }
-
-                    // 5.确定新的SCN
-                    startScn = Long.parseLong(endScn.toString());
-                    sleepSeconds(3);
+                    // 4.确定新的SCN
+                    startScn = endScn;
+                    sleepSeconds(1);
                 }
             } catch (Exception e) {
-                if (e instanceof SQLRecoverableException) {
+                if (connected) {
+                    logger.error(e.getMessage(), e);
                     recover();
-                    return;
-                }
-                logger.error(e.getMessage(), e);
-                try {
-                    close();
-                } catch (SQLException ex) {
-                    logger.error(ex.getMessage(), ex);
                 }
+            } finally {
+                closeResources(rs, statement);
             }
         }
     }

+ 137 - 118
dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/logminer/LogMinerHelper.java

@@ -3,7 +3,9 @@
  */
 package org.dbsyncer.connector.oracle.logminer;
 
+import org.dbsyncer.common.util.CollectionUtils;
 import org.dbsyncer.common.util.StringUtil;
+import org.dbsyncer.connector.oracle.OracleException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -15,10 +17,10 @@ import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.ArrayList;
-import java.util.LinkedHashSet;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.Objects;
-import java.util.Set;
 
 /**
  * @Author AE86
@@ -26,7 +28,7 @@ import java.util.Set;
  * @Date 2023-12-09 20:23
  */
 public class LogMinerHelper {
-    private static final Logger LOGGER = LoggerFactory.getLogger(LogMinerHelper.class);
+    private static final Logger logger = LoggerFactory.getLogger(LogMinerHelper.class);
     public static final int LOG_MINER_OC_INSERT = 1;
     public static final int LOG_MINER_OC_DELETE = 2;
     public static final int LOG_MINER_OC_UPDATE = 3;
@@ -34,21 +36,46 @@ public class LogMinerHelper {
     public static final int LOG_MINER_OC_COMMIT = 7;
     public static final int LOG_MINER_OC_MISSING_SCN = 34;
     public static final int LOG_MINER_OC_ROLLBACK = 36;
-
-    public static void removeLogFilesFromMining(Connection conn) throws SQLException {
-        try (PreparedStatement ps = conn.prepareStatement("SELECT FILENAME AS NAME FROM V$LOGMNR_LOGS");
-             ResultSet result = ps.executeQuery()) {
-            Set<String> files = new LinkedHashSet<>();
-            while (result.next()) {
-                files.add(result.getString(1));
-            }
-            for (String fileName : files) {
-                String sql = String.format("BEGIN SYS.DBMS_LOGMNR.REMOVE_LOGFILE(LOGFILENAME => '%s');END;", fileName);
-                executeCallableStatement(conn, sql);
-                LOGGER.debug("File {} was removed from mining", fileName);
-            }
-        }
-    }
+    private static final String LOG_MINER_SQL_QUERY_ROLES = "SELECT * FROM USER_ROLE_PRIVS";
+    private static final String LOG_MINER_KEY_GRANTED_ROLE = "GRANTED_ROLE";
+    private static final String LOG_MINER_SQL_QUERY_PRIVILEGES = "SELECT * FROM SESSION_PRIVS";
+    private static final String LOG_MINER_KEY_PRIVILEGE = "PRIVILEGE";
+    private static final List<String> LOG_MINER_PRIVILEGES_NEEDED = Arrays.asList("SELECT_CATALOG_ROLE", "CREATE SESSION", "SELECT ANY TRANSACTION", "SELECT ANY DICTIONARY", "LOGMINING");
+    private static final List<String> LOG_MINER_ORACLE_11_PRIVILEGES_NEEDED = Arrays.asList("SELECT_CATALOG_ROLE", "CREATE SESSION", "SELECT ANY TRANSACTION", "SELECT ANY DICTIONARY");
+    private static final String LOG_MINER_DBA_ROLE = "DBA";
+    private static final String LOG_MINER_SQL_GET_CURRENT_SCN = "select CURRENT_SCN from V$DATABASE";
+    private static final String LOG_MINER_SQL_IS_CDB = "select cdb from v$database";
+    private static final String LOG_MINER_SQL_ALTER_SESSION_CONTAINER = "alter session set container=CDB$ROOT";
+    private static final String LOG_MINER_SQL_ALTER_NLS_SESSION_PARAMETERS = "ALTER SESSION SET "
+            + "  NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS'"
+            + "  NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'"
+            + "  NLS_TIMESTAMP_TZ_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF TZH:TZM'"
+            + "  NLS_NUMERIC_CHARACTERS = '.,'"
+            + "  TIME_ZONE = '00:00'";
+    private static final String LOG_MINER_SQL_CURRENT_REDO_SEQUENCE = "SELECT SEQUENCE# FROM V$LOG WHERE STATUS = 'CURRENT'";
+    private static final String LOG_MINER_SQL_END_LOG_MINER = "BEGIN SYS.DBMS_LOGMNR.END_LOGMNR(); END;";
+    private static final String LOG_MINER_SQL_START_LOG_MINER = "DECLARE\n" +
+            "start_scn NUMBER := ?; end_scn NUMBER := ?; first_file BOOLEAN := true; \n" +
+            "BEGIN \n" +
+            "FOR log_file IN\n" +
+            " (\n" +
+            "  SELECT MIN(name) name, first_change# FROM \n" +
+            "  (\n" +
+            "   SELECT member AS name, first_change# FROM v$log l INNER JOIN v$logfile f ON l.group# = f.group# WHERE (l.STATUS = 'CURRENT' OR l.STATUS = 'ACTIVE') AND first_change# < end_scn\n" +
+            "   UNION\n" +
+            "   SELECT name, first_change# FROM v$archived_log WHERE name IS NOT NULL AND STANDBY_DEST='NO' AND first_change# < end_scn AND next_change# > start_scn \n" +
+            "  ) group by first_change# ORDER BY first_change# \n" +
+            " ) LOOP \n" +
+            " IF first_file THEN\n" +
+            "  SYS.DBMS_LOGMNR.add_logfile(log_file.name, SYS.DBMS_LOGMNR.NEW);\n" +
+            "  first_file := false;\n" +
+            " ELSE\n" +
+            "  SYS.DBMS_LOGMNR.add_logfile(log_file.name, SYS.DBMS_LOGMNR.ADDFILE);\n" +
+            " END IF;\n" +
+            "END LOOP;\n" +
+            "\n" +
+            "SYS.DBMS_LOGMNR.start_logmnr( options => SYS.DBMS_LOGMNR.SKIP_CORRUPTION + SYS.DBMS_LOGMNR.NO_SQL_DELIMITER + SYS.DBMS_LOGMNR.NO_ROWID_IN_STMT + SYS.DBMS_LOGMNR.DICT_FROM_ONLINE_CATALOG);\n" +
+            "END;";
 
     public static void executeCallableStatement(Connection connection, String statement) throws SQLException {
         Objects.requireNonNull(statement);
@@ -57,104 +84,29 @@ public class LogMinerHelper {
         }
     }
 
-    public static List<LogFile> getOnlineLogFilesForOffsetScn(Connection connection, BigInteger offsetScn) throws SQLException {
-        List<LogFile> redoLogFiles = new ArrayList<>();
-
-        String onlineLogQuery = "SELECT MIN(F.MEMBER) AS FILE_NAME, L.NEXT_CHANGE# AS NEXT_CHANGE, F.GROUP#, L.FIRST_CHANGE# AS FIRST_CHANGE, L.STATUS " +
-                " FROM V$LOG L, V$LOGFILE F " +
-                " WHERE F.GROUP# = L.GROUP# AND L.NEXT_CHANGE# > 0 " +
-                " GROUP BY F.GROUP#, L.NEXT_CHANGE#, L.FIRST_CHANGE#, L.STATUS ORDER BY 3";
-
-        try (PreparedStatement s = connection.prepareStatement(onlineLogQuery)) {
-            try (ResultSet rs = s.executeQuery()) {
-                while (rs.next()) {
-                    String fileName = rs.getString(1);
-                    BigInteger nextChangeNumber = new BigInteger(rs.getString(2));
-                    BigInteger firstChangeNumber = new BigInteger(rs.getString(4));
-                    String status = rs.getString(5);
-                    LogFile logFile = new LogFile(fileName, firstChangeNumber, nextChangeNumber, "CURRENT".equalsIgnoreCase(status));
-                    // 添加Current Redo || scn 范围符合的
-                    if (logFile.isCurrent() || logFile.getNextScn().compareTo(offsetScn) >= 0) {
-                        redoLogFiles.add(logFile);
-                    }
-                }
-            }
-        }
-        return redoLogFiles;
-    }
-
-    public static List<LogFile> getArchivedLogFilesForOffsetScn(Connection connection, BigInteger offsetScn) throws SQLException {
-        String archiveLogsQuery = String.format("SELECT NAME AS FILE_NAME, NEXT_CHANGE# AS NEXT_CHANGE, FIRST_CHANGE# AS FIRST_CHANGE FROM V$ARCHIVED_LOG " +
-                "WHERE NAME IS NOT NULL AND ARCHIVED = 'YES' " +
-                "AND STATUS = 'A' AND NEXT_CHANGE# > %s ORDER BY 2", offsetScn);
-
-        final List<LogFile> archiveLogFiles = new ArrayList<>();
-        try (PreparedStatement s = connection.prepareStatement(archiveLogsQuery)) {
-            try (ResultSet rs = s.executeQuery()) {
-                while (rs.next()) {
-                    String fileName = rs.getString(1);
-                    BigInteger firstChangeNumber = new BigInteger(rs.getString(3));
-                    BigInteger nextChangeNumber = new BigInteger(rs.getString(2));
-                    archiveLogFiles.add(new LogFile(fileName, firstChangeNumber, nextChangeNumber, false));
-                }
-            }
-        }
-        return archiveLogFiles;
-    }
-
-    public static void addLogFile(Connection connection, String fileName) throws SQLException {
-        String addLogFile = "BEGIN sys.dbms_logmnr.add_logfile(LOGFILENAME => '%s', OPTIONS => %s);END;";
-        String options = "DBMS_LOGMNR.ADDFILE";
-//        String options = "DBMS_LOGMNR.NEW";
-        executeCallableStatement(connection, String.format(addLogFile, fileName, options));
-    }
-
     public static List<BigInteger> getCurrentRedoLogSequences(Connection connection) throws SQLException {
-        String currentRedoSequence = "SELECT SEQUENCE# FROM V$LOG WHERE STATUS = 'CURRENT'";
-        try (Statement statement = connection.createStatement();
-             ResultSet rs = statement.executeQuery(currentRedoSequence)) {
-            List<BigInteger> sequences = new ArrayList<>();
-            if (rs.next()) {
-                sequences.add(new BigInteger(rs.getString(1)));
+        try (Statement statement = connection.createStatement()) {
+            try (ResultSet rs = statement.executeQuery(LOG_MINER_SQL_CURRENT_REDO_SEQUENCE)) {
+                List<BigInteger> sequences = new ArrayList<>();
+                if (rs.next()) {
+                    sequences.add(new BigInteger(rs.getString(1)));
+                }
+                // 如果是RAC则会返回多个SEQUENCE
+                return sequences;
             }
-            // 如果是RAC则会返回多个SEQUENCE
-            return sequences;
         }
     }
 
-    public static void buildDataDictionary(Connection connection, String miningStrategy) throws SQLException {
-        if (StringUtil.isBlank(miningStrategy)) {
-            // default
-            String sql = "BEGIN DBMS_LOGMNR_D.BUILD (options => DBMS_LOGMNR_D.STORE_IN_REDO_LOGS); END;";
-            executeCallableStatement(connection, sql);
-        }
-    }
-
-    public static void startLogMiner(Connection connection, BigInteger startScn, BigInteger endScn, String miningStrategy) throws SQLException {
-        LOGGER.debug("startLogMiner... startScn {}, endScn {}", startScn, endScn);
-        // default
-        if (StringUtil.isBlank(miningStrategy)) {
-            miningStrategy = "DBMS_LOGMNR.DICT_FROM_REDO_LOGS + DBMS_LOGMNR.DDL_DICT_TRACKING ";
-        }
-
-        String startLogMiner = "BEGIN sys.dbms_logmnr.start_logmnr(" +
-                "startScn => '" + startScn + "', " +
-                "endScn => '" + endScn + "', " +
-                "OPTIONS => " + miningStrategy +
-                " + DBMS_LOGMNR.NO_ROWID_IN_STMT);" +
-                "END;";
-
-        executeCallableStatement(connection, startLogMiner);
-    }
-
     public static void endLogMiner(Connection connection) {
-        try {
-            executeCallableStatement(connection, "BEGIN SYS.DBMS_LOGMNR.END_LOGMNR(); END;");
-        } catch (SQLException e) {
-            if (e.getMessage().toUpperCase().contains("ORA-01307")) {
-                LOGGER.info("LogMiner session was already closed");
-            } else {
-                LOGGER.error("Cannot close LogMiner session gracefully: {}", e);
+        if (connection != null) {
+            try {
+                executeCallableStatement(connection, LOG_MINER_SQL_END_LOG_MINER);
+            } catch (Exception e) {
+                if (e.getMessage().toUpperCase().contains("ORA-01307")) {
+                    logger.info("LogMiner session was already closed", e);
+                } else {
+                    logger.warn("Cannot close log miner session gracefully", e);
+                }
             }
         }
     }
@@ -202,14 +154,81 @@ public class LogMinerHelper {
     }
 
     public static void setSessionParameter(Connection connection) throws SQLException {
-        String sql = "ALTER SESSION SET "
-                + "  NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS'"
-                + "  NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'"
-                + "  NLS_TIMESTAMP_TZ_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF TZH:TZM'"
-                + "  NLS_NUMERIC_CHARACTERS = '.,'";
-
-        executeCallableStatement(connection, sql);
-        executeCallableStatement(connection, "ALTER SESSION SET TIME_ZONE = '00:00'");
+        executeCallableStatement(connection, LOG_MINER_SQL_ALTER_NLS_SESSION_PARAMETERS);
+    }
+
+    public static void startLogMiner(Connection connection, long startScn, long endScn) throws SQLException {
+        try (PreparedStatement logMinerStartStmt = connection.prepareCall(LOG_MINER_SQL_START_LOG_MINER)) {
+            logMinerStartStmt.setString(1, String.valueOf(startScn));
+            logMinerStartStmt.setString(2, String.valueOf(endScn));
+            logMinerStartStmt.execute();
+        }
+    }
+
+    public static long getCurrentScn(Connection connection) throws SQLException {
+        try (Statement statement = connection.createStatement()) {
+            try (ResultSet rs = statement.executeQuery(LOG_MINER_SQL_GET_CURRENT_SCN)) {
+                if (!rs.next()) {
+                    throw new IllegalStateException("Couldn't get SCN");
+                }
+                return rs.getLong(1);
+            }
+        }
+    }
+
+    public static void setSessionContainerIfCdbMode(Connection connection) throws SQLException {
+        try (Statement statement = connection.createStatement()) {
+            try (ResultSet rs = statement.executeQuery(LOG_MINER_SQL_IS_CDB)) {
+                rs.next();
+                // cdb模式 需要切换到根容器
+                if (rs.getString(1).equalsIgnoreCase("YES")) {
+                    try (PreparedStatement ps = connection.prepareStatement(LOG_MINER_SQL_ALTER_SESSION_CONTAINER)) {
+                        try {
+                            ps.execute();
+                        } catch (SQLException e) {
+                            throw new OracleException(String.format("sql=%s error=%s", LOG_MINER_SQL_ALTER_SESSION_CONTAINER, e.getMessage()));
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    private static List<String> queryList(Connection connection, String querySql, String key) throws SQLException {
+        List<String> list = new ArrayList<>();
+        try (Statement statement = connection.createStatement()) {
+            try (ResultSet rs = statement.executeQuery(querySql)) {
+                while (rs.next()) {
+                    String k = rs.getString(key);
+                    if (StringUtil.isNotBlank(k)) {
+                        list.add(k.toUpperCase());
+                    }
+                }
+            }
+        }
+        return list;
     }
 
+    public static void checkPermissions(Connection connection, int version) throws SQLException {
+        List<String> roles = queryList(connection, LOG_MINER_SQL_QUERY_ROLES, LOG_MINER_KEY_GRANTED_ROLE);
+        if (CollectionUtils.isEmpty(roles)) {
+            throw new RuntimeException("No permissions");
+        }
+
+        // DBA
+        if (roles.contains(LOG_MINER_DBA_ROLE)) {
+            return;
+        }
+
+        List<String> privileges = queryList(connection, LOG_MINER_SQL_QUERY_PRIVILEGES, LOG_MINER_KEY_PRIVILEGE);
+        if (CollectionUtils.isEmpty(privileges)) {
+            throw new RuntimeException("No permissions");
+        }
+        List<String> checkPrivileges = version <= 11 ? LOG_MINER_ORACLE_11_PRIVILEGES_NEEDED : LOG_MINER_PRIVILEGES_NEEDED;
+        long count = privileges.stream().filter(checkPrivileges::contains).count();
+        if (count != checkPrivileges.size()) {
+            String log = StringUtil.join(Collections.singleton(checkPrivileges), StringUtil.COMMA);
+            throw new IllegalArgumentException(String.format("No permission, please execute sql authorization:GRANT %s TO USER_ROLE;", log));
+        }
+    }
 }

+ 0 - 29
dbsyncer-connector/dbsyncer-connector-oracle/src/main/java/org/dbsyncer/connector/oracle/model/DCNEvent.java

@@ -1,29 +0,0 @@
-/**
- * DBSyncer Copyright 2020-2023 All Rights Reserved.
- */
-package org.dbsyncer.connector.oracle.model;
-
-public final class DCNEvent {
-
-    private String tableName;
-    private String rowId;
-    private int code;
-
-    public DCNEvent(String tableName, String rowId, int code) {
-        this.tableName = tableName;
-        this.rowId = rowId;
-        this.code = code;
-    }
-
-    public String getTableName() {
-        return tableName;
-    }
-
-    public String getRowId() {
-        return rowId;
-    }
-
-    public int getCode() {
-        return code;
-    }
-}

+ 0 - 104
dbsyncer-connector/dbsyncer-connector-oracle/src/test/java/DBChangeNotificationTest.java

@@ -1,104 +0,0 @@
-/**
- * DBSyncer Copyright 2020-2023 All Rights Reserved.
- */
-import oracle.jdbc.OracleStatement;
-import oracle.jdbc.driver.OracleConnection;
-import org.dbsyncer.connector.oracle.dcn.DBChangeNotification;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.*;
-
-/**
- * @Author AE86
- * @Version 1.0.0
- * @Date 2022-06-08 21:53
- */
-public class DBChangeNotificationTest {
-
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
-    @Test
-    public void testConnect() throws Exception {
-        String username = "ae86";
-        String password = "123";
-        String url = "jdbc:oracle:thin:@127.0.0.1:1521:XE";
-
-        final DBChangeNotification dcn = new DBChangeNotification(username, password, url);
-        dcn.addRowEventListener((e) ->
-            logger.info("{}触发{}, data:{}", e.getSourceTableName(), e.getEvent(), e.getDataList())
-        );
-        dcn.start();
-
-        // 模拟并发
-        final int threadSize = 301;
-        final ExecutorService pool = Executors.newFixedThreadPool(threadSize);
-        final CyclicBarrier barrier = new CyclicBarrier(threadSize);
-        final CountDownLatch latch = new CountDownLatch(threadSize);
-
-        for (int i = 0; i < threadSize; i++) {
-            final int k = i + 3;
-            pool.submit(() -> {
-                try {
-                    barrier.await();
-                    //read(k, dcn);
-
-                    // 模拟写入操作
-                    insert(k, dcn);
-
-                } catch (InterruptedException e) {
-                    logger.error(e.getMessage());
-                } catch (BrokenBarrierException e) {
-                    logger.error(e.getMessage());
-                } finally {
-                    latch.countDown();
-                }
-            });
-        }
-
-        try {
-            latch.await();
-            logger.info("try to close");
-        } catch (InterruptedException e) {
-            logger.error(e.getMessage());
-        }
-        pool.shutdown();
-
-        TimeUnit.SECONDS.sleep(20);
-        dcn.close();
-        logger.info("test end");
-
-    }
-
-    private void insert(int k, DBChangeNotification dcn) {
-        OracleConnection conn = dcn.getOracleConnection();
-        OracleStatement os = null;
-        ResultSet rs = null;
-        try {
-            os = (OracleStatement) conn.createStatement();
-            String sql = "INSERT INTO \"AE86\".\"my_user\"(\"id\", \"name\", \"age\", \"phone\", \"create_date\", \"last_time\", \"money\", \"car\", \"big\", \"clo\", \"rel\") VALUES (" + k + ", '红包', '2', '18200001111', TO_DATE('2015-10-23 00:00:00', 'SYYYY-MM-DD HH24:MI:SS'), TO_TIMESTAMP('2021-01-23 00:00:00.000000', 'SYYYY-MM-DD HH24:MI:SS:FF6'), '200.00000000000000', '4', null, '888', '3.0000000000000000')";
-
-            int i = os.executeUpdate(sql);
-            logger.info("insert:{}, {}", k, i);
-        } catch (SQLException e) {
-            logger.error(e.getMessage());
-        } finally {
-            dcn.close(rs);
-            dcn.close(os);
-        }
-    }
-
-    private void read(final int k, DBChangeNotification dcn) {
-        final String tableName = "my_user";
-        final String rowId = "AAAE5fAABAAALCJAAx";
-        List<Object> data = new ArrayList<>();
-        dcn.read(tableName, rowId, data);
-        logger.info("{}, 【{}】, data:{}", k, data.size(), data);
-    }
-
-}

+ 0 - 94
dbsyncer-connector/dbsyncer-connector-oracle/src/test/java/LinkedBlockingQueueTest.java

@@ -1,94 +0,0 @@
-/**
- * DBSyncer Copyright 2020-2023 All Rights Reserved.
- */
-import oracle.jdbc.dcn.TableChangeDescription;
-import org.dbsyncer.common.util.RandomUtil;
-import org.dbsyncer.connector.oracle.model.DCNEvent;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
-
-/**
- * @Author AE86
- * @Version 1.0.0
- * @Date 2022-06-08 21:53
- */
-public class LinkedBlockingQueueTest {
-
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-    private BlockingQueue queue = new LinkedBlockingQueue<>(10);
-
-    @Test
-    public void testProducerAndConsumer() throws InterruptedException {
-        logger.info("test begin");
-        new Producer(queue).start();
-        new Consumer(queue).start();
-        new Consumer(queue).start();
-        TimeUnit.SECONDS.sleep(60);
-        logger.info("test end");
-    }
-
-    /**
-     * 生产
-     */
-    class Producer extends Thread {
-
-        BlockingQueue<DCNEvent> queue;
-        int taskNumber = 50;
-
-        public Producer(BlockingQueue<DCNEvent> queue) {
-            setName("Producer-thread");
-            this.queue = queue;
-        }
-
-        @Override
-        public void run() {
-            logger.info("生产线程{}开始工作", Thread.currentThread().getName());
-            for (int i = 0; i < taskNumber; i++) {
-                DCNEvent event = new DCNEvent("my_user" + i, "AAAF8BAABAAALJBAAA", TableChangeDescription.TableOperation.INSERT.getCode());
-                try {
-                    // 如果BlockQueue没有空间,则调用此方法的线程被阻断直到BlockingQueue里面有空间再继续
-                    queue.put(event);
-                } catch (InterruptedException e) {
-                    logger.error("添加消息:{}, 失败", event, e.getMessage());
-                }
-            }
-            logger.info("生产线程{}结束工作", Thread.currentThread().getName());
-        }
-    }
-
-    /**
-     * 消费
-     */
-    class Consumer extends Thread {
-
-        BlockingQueue<DCNEvent> queue;
-
-        public Consumer(BlockingQueue<DCNEvent> queue) {
-            setName("Consumer-thread-" + RandomUtil.nextInt(1, 100));
-            this.queue = queue;
-        }
-
-        @Override
-        public void run() {
-            String threadName = Thread.currentThread().getName();
-            logger.info("消费线程{}开始工作", threadName);
-            while (true) {
-                try {
-                    // 模拟耗时
-                    TimeUnit.SECONDS.sleep(RandomUtil.nextInt(0, 3));
-                    // 取走BlockingQueue里排在首位的对象,若BlockingQueue为空,阻断进入等待状态直到Blocking有新的对象被加入为止
-                    DCNEvent event = queue.take();
-                    logger.error("消费线程{}接受消息:{}", threadName, event.getTableName());
-                } catch (InterruptedException e) {
-                    logger.error(e.getMessage());
-                }
-            }
-        }
-    }
-
-}

+ 1 - 2
dbsyncer-connector/dbsyncer-connector-postgresql/src/main/java/org/dbsyncer/connector/postgresql/DQLPostgreSQLConnector.java

@@ -23,12 +23,11 @@ import org.dbsyncer.sdk.plugin.ReaderContext;
  */
 public final class DQLPostgreSQLConnector extends AbstractDQLConnector {
 
-    private final String TYPE = "DqlPostgreSQL";
     private final DqlPostgreSQLConfigValidator configValidator = new DqlPostgreSQLConfigValidator();
 
     @Override
     public String getConnectorType() {
-        return TYPE;
+        return "DqlPostgreSQL";
     }
 
     @Override

+ 1 - 2
dbsyncer-connector/dbsyncer-connector-postgresql/src/main/java/org/dbsyncer/connector/postgresql/PostgreSQLConnector.java

@@ -38,7 +38,6 @@ public final class PostgreSQLConnector extends AbstractDatabaseConnector {
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
-    private final String TYPE = "PostgreSQL";
     private final PostgreSQLConfigValidator configValidator = new PostgreSQLConfigValidator();
 
     public PostgreSQLConnector() {
@@ -48,7 +47,7 @@ public final class PostgreSQLConnector extends AbstractDatabaseConnector {
 
     @Override
     public String getConnectorType() {
-        return TYPE;
+        return "PostgreSQL";
     }
 
     @Override

+ 2 - 2
dbsyncer-connector/dbsyncer-connector-postgresql/src/main/java/org/dbsyncer/connector/postgresql/decoder/impl/PgOutputMessageDecoder.java

@@ -21,9 +21,9 @@ import org.springframework.util.Assert;
 import java.nio.ByteBuffer;
 import java.time.LocalDateTime;
 import java.util.ArrayList;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 /**
  * @Author AE86
@@ -36,7 +36,7 @@ public class PgOutputMessageDecoder extends AbstractMessageDecoder {
 
     private static final LocalDateTime PG_EPOCH = LocalDateTime.of(2000, 1, 1, 0, 0, 0);
     private static final String GET_TABLE_SCHEMA = "select t.oid,t.relname as tableName from pg_class t inner join (select ns.oid as nspoid, ns.nspname from pg_namespace ns where ns.nspname = '%s') as n on n.nspoid = t.relnamespace where relkind = 'r'";
-    private static final Map<Integer, TableId> tables = new LinkedHashMap<>();
+    private static final Map<Integer, TableId> tables = new ConcurrentHashMap<>();
     private ConnectorService connectorService;
     private DatabaseConnectorInstance connectorInstance;
 

+ 1 - 3
dbsyncer-connector/dbsyncer-connector-sqlite/src/main/java/org/dbsyncer/connector/sqlite/DqlSQLiteConnector.java

@@ -26,13 +26,11 @@ import java.util.List;
  */
 public final class DqlSQLiteConnector extends AbstractDQLConnector {
 
-    private final String TYPE = "DqlSQLite";
-
     private final DqlSQLiteConfigValidator configValidator = new DqlSQLiteConfigValidator();
 
     @Override
     public String getConnectorType() {
-        return TYPE;
+        return "DqlSQLite";
     }
 
     @Override

+ 1 - 2
dbsyncer-connector/dbsyncer-connector-sqlite/src/main/java/org/dbsyncer/connector/sqlite/SQLiteConnector.java

@@ -36,12 +36,11 @@ public final class SQLiteConnector extends AbstractDatabaseConnector {
     private final String QUERY_VIEW = "SELECT name FROM sqlite_master WHERE type = 'view'";
     private final String QUERY_TABLE = "SELECT name FROM sqlite_master WHERE type='table'";
 
-    private final String TYPE = "SQLite";
     private final SQLiteConfigValidator configValidator = new SQLiteConfigValidator();
 
     @Override
     public String getConnectorType() {
-        return TYPE;
+        return "SQLite";
     }
 
     @Override

+ 0 - 0
dbsyncer-connector/dbsyncer-connector-sqlite/src/main/resources/static/img/sqlite.png → dbsyncer-connector/dbsyncer-connector-sqlite/src/main/resources/static/img/SQLite.png


+ 1 - 2
dbsyncer-connector/dbsyncer-connector-sqlserver/src/main/java/org/dbsyncer/connector/sqlserver/DQLSqlServerConnector.java

@@ -26,12 +26,11 @@ import java.util.List;
  */
 public final class DQLSqlServerConnector extends AbstractDQLConnector {
 
-    private final String TYPE = "DqlSqlServer";
     private final DqlSqlServerConfigValidator configValidator = new DqlSqlServerConfigValidator();
 
     @Override
     public String getConnectorType() {
-        return TYPE;
+        return "DqlSqlServer";
     }
 
     @Override

+ 23 - 2
dbsyncer-connector/dbsyncer-connector-sqlserver/src/main/java/org/dbsyncer/connector/sqlserver/SqlServerConnector.java

@@ -4,7 +4,9 @@
 package org.dbsyncer.connector.sqlserver;
 
 import org.dbsyncer.common.util.CollectionUtils;
+import org.dbsyncer.common.util.DateFormatUtil;
 import org.dbsyncer.common.util.StringUtil;
+import org.dbsyncer.connector.sqlserver.cdc.Lsn;
 import org.dbsyncer.connector.sqlserver.cdc.SqlServerListener;
 import org.dbsyncer.connector.sqlserver.validator.SqlServerConfigValidator;
 import org.dbsyncer.sdk.config.CommandConfig;
@@ -23,6 +25,7 @@ import org.dbsyncer.sdk.model.PageSql;
 import org.dbsyncer.sdk.model.Table;
 import org.dbsyncer.sdk.plugin.ReaderContext;
 
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -43,12 +46,11 @@ public final class SqlServerConnector extends AbstractDatabaseConnector {
     private final String SET_TABLE_IDENTITY_ON = "set identity_insert %s.[%s] on;";
     private final String SET_TABLE_IDENTITY_OFF = ";set identity_insert %s.[%s] off;";
 
-    private final String TYPE = "SqlServer";
     private final SqlServerConfigValidator configValidator = new SqlServerConfigValidator();
 
     @Override
     public String getConnectorType() {
-        return TYPE;
+        return "SqlServer";
     }
 
     @Override
@@ -148,6 +150,25 @@ public final class SqlServerConnector extends AbstractDatabaseConnector {
         return targetCommand;
     }
 
+    @Override
+    public Object getPosition(DatabaseConnectorInstance connectorInstance) {
+        String sql = "SELECT * from cdc.lsn_time_mapping order by tran_begin_time desc";
+        List<Map<String, Object>> result = connectorInstance.execute(databaseTemplate -> databaseTemplate.queryForList(sql));
+        if (!CollectionUtils.isEmpty(result)) {
+            List<Object> list = new ArrayList<>();
+            result.forEach(r -> {
+                r.computeIfPresent("start_lsn", (k, lsn)-> new Lsn((byte[]) lsn).toString());
+                r.computeIfPresent("tran_begin_lsn", (k, lsn)-> new Lsn((byte[]) lsn).toString());
+                r.computeIfPresent("tran_id", (k, lsn)-> new Lsn((byte[]) lsn).toString());
+                r.computeIfPresent("tran_begin_time", (k, tranBeginTime)-> DateFormatUtil.timestampToString((Timestamp) tranBeginTime));
+                r.computeIfPresent("tran_end_time", (k, tranEndTime)-> DateFormatUtil.timestampToString((Timestamp) tranEndTime));
+                list.add(r);
+            });
+            return list;
+        }
+        return result;
+    }
+
     private String convertKey(String key) {
         return new StringBuilder("[").append(key).append("]").toString();
     }

+ 15 - 20
dbsyncer-manager/src/main/java/org/dbsyncer/manager/impl/FullPuller.java

@@ -5,18 +5,18 @@ package org.dbsyncer.manager.impl;
 
 import org.dbsyncer.common.util.NumberUtil;
 import org.dbsyncer.common.util.StringUtil;
-import org.dbsyncer.sdk.util.PrimaryKeyUtil;
 import org.dbsyncer.manager.AbstractPuller;
+import org.dbsyncer.parser.LogService;
+import org.dbsyncer.parser.LogType;
 import org.dbsyncer.parser.ParserComponent;
 import org.dbsyncer.parser.ProfileComponent;
 import org.dbsyncer.parser.enums.ParserEnum;
 import org.dbsyncer.parser.event.FullRefreshEvent;
-import org.dbsyncer.parser.LogService;
-import org.dbsyncer.parser.LogType;
 import org.dbsyncer.parser.model.Mapping;
 import org.dbsyncer.parser.model.Meta;
 import org.dbsyncer.parser.model.TableGroup;
 import org.dbsyncer.parser.model.Task;
+import org.dbsyncer.sdk.util.PrimaryKeyUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.context.ApplicationListener;
@@ -53,29 +53,25 @@ public final class FullPuller extends AbstractPuller implements ApplicationListe
     @Resource
     private LogService logService;
 
-    private Map<String, Task> map = new ConcurrentHashMap<>();
+    private final Map<String, Task> map = new ConcurrentHashMap<>();
 
     @Override
     public void start(Mapping mapping) {
+        List<TableGroup> list = profileComponent.getSortedTableGroupAll(mapping.getId());
+        Assert.notEmpty(list, "映射关系不能为空");
         Thread worker = new Thread(() -> {
             final String metaId = mapping.getMetaId();
-            ExecutorService executor = null;
+            ExecutorService executor = Executors.newFixedThreadPool(mapping.getThreadNum());
             try {
-                List<TableGroup> list = profileComponent.getSortedTableGroupAll(mapping.getId());
-                Assert.notEmpty(list, "映射关系不能为空");
+                Task task = map.computeIfAbsent(metaId, k -> new Task(metaId));
                 logger.info("开始全量同步:{}, {}", metaId, mapping.getName());
-                map.putIfAbsent(metaId, new Task(metaId));
-                executor = Executors.newFixedThreadPool(mapping.getThreadNum());
-                Task task = map.get(metaId);
                 doTask(task, mapping, list, executor);
             } catch (Exception e) {
                 logger.error(e.getMessage(), e);
                 logService.log(LogType.SystemLog.ERROR, e.getMessage());
             } finally {
                 try {
-                    if (executor != null) {
-                        executor.shutdown();
-                    }
+                    executor.shutdown();
                 } catch (Exception e) {
                     logService.log(LogType.SystemLog.ERROR, e.getMessage());
                 }
@@ -84,17 +80,17 @@ public final class FullPuller extends AbstractPuller implements ApplicationListe
                 logger.info("结束全量同步:{}, {}", metaId, mapping.getName());
             }
         });
-        worker.setName(new StringBuilder("full-worker-").append(mapping.getId()).toString());
+        worker.setName("full-worker-" + mapping.getId());
         worker.setDaemon(false);
         worker.start();
     }
 
     @Override
     public void close(String metaId) {
-        Task task = map.get(metaId);
-        if (null != task) {
+        map.computeIfPresent(metaId, (k, task) -> {
             task.stop();
-        }
+            return null;
+        });
     }
 
     @Override
@@ -114,8 +110,7 @@ public final class FullPuller extends AbstractPuller implements ApplicationListe
         Map<String, String> snapshot = meta.getSnapshot();
         task.setPageIndex(NumberUtil.toInt(snapshot.get(ParserEnum.PAGE_INDEX.getCode()), ParserEnum.PAGE_INDEX.getDefaultValue()));
         // 反序列化游标值类型(通常为数字或字符串类型)
-        String cursorValue = snapshot.get(ParserEnum.CURSOR.getCode());
-        task.setCursors(PrimaryKeyUtil.getLastCursors(cursorValue));
+        task.setCursors(PrimaryKeyUtil.getLastCursors(snapshot.get(ParserEnum.CURSOR.getCode())));
         task.setTableGroupIndex(NumberUtil.toInt(snapshot.get(ParserEnum.TABLE_GROUP_INDEX.getCode()), ParserEnum.TABLE_GROUP_INDEX.getDefaultValue()));
         flush(task);
 
@@ -152,7 +147,7 @@ public final class FullPuller extends AbstractPuller implements ApplicationListe
         meta.setUpdateTime(Instant.now().toEpochMilli());
         Map<String, String> snapshot = meta.getSnapshot();
         snapshot.put(ParserEnum.PAGE_INDEX.getCode(), String.valueOf(task.getPageIndex()));
-        snapshot.put(ParserEnum.CURSOR.getCode(), StringUtil.join(task.getCursors(), ","));
+        snapshot.put(ParserEnum.CURSOR.getCode(), StringUtil.getIfBlank(StringUtil.join(task.getCursors(), StringUtil.COMMA), StringUtil.EMPTY));
         snapshot.put(ParserEnum.TABLE_GROUP_INDEX.getCode(), String.valueOf(task.getTableGroupIndex()));
         profileComponent.editConfigModel(meta);
     }

+ 42 - 50
dbsyncer-manager/src/main/java/org/dbsyncer/manager/impl/IncrementPuller.java

@@ -3,32 +3,26 @@
  */
 package org.dbsyncer.manager.impl;
 
-import org.dbsyncer.common.util.CollectionUtils;
+import org.dbsyncer.common.scheduled.ScheduledTaskJob;
+import org.dbsyncer.common.scheduled.ScheduledTaskService;
 import org.dbsyncer.connector.base.ConnectorFactory;
 import org.dbsyncer.manager.AbstractPuller;
 import org.dbsyncer.manager.ManagerException;
 import org.dbsyncer.parser.LogService;
 import org.dbsyncer.parser.LogType;
 import org.dbsyncer.parser.ProfileComponent;
-import org.dbsyncer.parser.consumer.impl.LogConsumer;
-import org.dbsyncer.parser.consumer.impl.QuartzConsumer;
+import org.dbsyncer.parser.TableGroupContext;
+import org.dbsyncer.parser.consumer.ParserConsumer;
 import org.dbsyncer.parser.event.RefreshOffsetEvent;
 import org.dbsyncer.parser.flush.impl.BufferActuatorRouter;
-import org.dbsyncer.parser.model.Connector;
-import org.dbsyncer.parser.model.Mapping;
-import org.dbsyncer.parser.model.Meta;
-import org.dbsyncer.parser.model.TableGroup;
+import org.dbsyncer.parser.model.*;
+import org.dbsyncer.parser.util.PickerUtil;
 import org.dbsyncer.sdk.config.ListenerConfig;
 import org.dbsyncer.sdk.enums.ListenerTypeEnum;
 import org.dbsyncer.sdk.listener.AbstractListener;
 import org.dbsyncer.sdk.listener.AbstractQuartzListener;
 import org.dbsyncer.sdk.listener.Listener;
-import org.dbsyncer.sdk.model.ChangedOffset;
-import org.dbsyncer.sdk.model.ConnectorConfig;
-import org.dbsyncer.sdk.model.Table;
-import org.dbsyncer.sdk.model.TableGroupQuartzCommand;
-import org.dbsyncer.common.scheduled.ScheduledTaskJob;
-import org.dbsyncer.common.scheduled.ScheduledTaskService;
+import org.dbsyncer.sdk.model.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.context.ApplicationListener;
@@ -38,11 +32,7 @@ import org.springframework.util.Assert;
 import javax.annotation.PostConstruct;
 import javax.annotation.Resource;
 import java.time.Instant;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
 
@@ -73,7 +63,10 @@ public final class IncrementPuller extends AbstractPuller implements Application
     @Resource
     private LogService logService;
 
-    private Map<String, Listener> map = new ConcurrentHashMap<>();
+    @Resource
+    private TableGroupContext tableGroupContext;
+
+    private final Map<String, Listener> map = new ConcurrentHashMap<>();
 
     @PostConstruct
     private void init() {
@@ -84,7 +77,6 @@ public final class IncrementPuller extends AbstractPuller implements Application
     public void start(Mapping mapping) {
         final String mappingId = mapping.getId();
         final String metaId = mapping.getMetaId();
-        logger.info("开始增量同步:{}, {}", metaId, mapping.getName());
         Connector connector = profileComponent.getConnector(mapping.getSourceConnectorId());
         Assert.notNull(connector, "连接器不能为空.");
         List<TableGroup> list = profileComponent.getSortedTableGroupAll(mappingId);
@@ -94,51 +86,50 @@ public final class IncrementPuller extends AbstractPuller implements Application
 
         Thread worker = new Thread(() -> {
             try {
-                long now = Instant.now().toEpochMilli();
-                meta.setBeginTime(now);
-                meta.setEndTime(now);
-                profileComponent.editConfigModel(meta);
-                map.putIfAbsent(metaId, getListener(mapping, connector, list, meta));
-                map.get(metaId).start();
+                map.computeIfAbsent(metaId, k-> {
+                    logger.info("开始增量同步:{}, {}", metaId, mapping.getName());
+                    long now = Instant.now().toEpochMilli();
+                    meta.setBeginTime(now);
+                    meta.setEndTime(now);
+                    profileComponent.editConfigModel(meta);
+                    tableGroupContext.put(mapping, list);
+                    return getListener(mapping, connector, list, meta);
+                }).start();
             } catch (Exception e) {
                 close(metaId);
                 logService.log(LogType.TableGroupLog.INCREMENT_FAILED, e.getMessage());
                 logger.error("运行异常,结束增量同步{}:{}", metaId, e.getMessage());
             }
         });
-        worker.setName(new StringBuilder("increment-worker-").append(mapping.getId()).toString());
+        worker.setName("increment-worker-" + mapping.getId());
         worker.setDaemon(false);
         worker.start();
     }
 
     @Override
     public void close(String metaId) {
-        Listener listener = map.get(metaId);
-        if (null != listener) {
-            bufferActuatorRouter.unbind(metaId);
+        map.computeIfPresent(metaId, (k, listener)->{
             listener.close();
-        }
-        map.remove(metaId);
-        publishClosedEvent(metaId);
-        logger.info("关闭成功:{}", metaId);
+            bufferActuatorRouter.unbind(metaId);
+            tableGroupContext.clear(metaId);
+            publishClosedEvent(metaId);
+            logger.info("关闭成功:{}", metaId);
+            return null;
+        });
     }
 
     @Override
     public void onApplicationEvent(RefreshOffsetEvent event) {
-        List<ChangedOffset> offsetList = event.getOffsetList();
-        if (!CollectionUtils.isEmpty(offsetList)) {
-            offsetList.forEach(offset -> {
-                if (offset.isRefreshOffset() && map.containsKey(offset.getMetaId())) {
-                    map.get(offset.getMetaId()).refreshEvent(offset);
-                }
-            });
+        ChangedOffset offset = event.getChangedOffset();
+        if (offset != null && map.containsKey(offset.getMetaId())) {
+            map.get(offset.getMetaId()).refreshEvent(offset);
         }
     }
 
     @Override
     public void run() {
         // 定时同步增量信息
-        map.values().forEach(listener -> listener.flushEvent());
+        map.values().forEach(Listener::flushEvent);
     }
 
     private Listener getListener(Mapping mapping, Connector connector, List<TableGroup> list, Meta meta) {
@@ -150,18 +141,19 @@ public final class IncrementPuller extends AbstractPuller implements Application
         if (null == listener) {
             throw new ManagerException(String.format("Unsupported listener type \"%s\".", connectorConfig.getConnectorType()));
         }
+        listener.register(new ParserConsumer(bufferActuatorRouter, profileComponent, logService, meta.getId(), list));
 
         // 默认定时抽取
         if (ListenerTypeEnum.isTiming(listenerType) && listener instanceof AbstractQuartzListener) {
             AbstractQuartzListener quartzListener = (AbstractQuartzListener) listener;
-            quartzListener.setCommands(list.stream().map(t -> new TableGroupQuartzCommand(t.getSourceTable(), t.getCommand())).collect(Collectors.toList()));
-            quartzListener.register(new QuartzConsumer().init(bufferActuatorRouter, profileComponent, logService, meta.getId(), mapping, list));
-        }
-
-        // 基于日志抽取
-        if (ListenerTypeEnum.isLog(listenerType) && listener instanceof AbstractListener) {
-            AbstractListener abstractListener = (AbstractListener) listener;
-            abstractListener.register(new LogConsumer().init(bufferActuatorRouter, profileComponent, logService, meta.getId(), mapping, list));
+            List<TableGroupQuartzCommand> quartzCommands = list.stream().map(t -> {
+                final TableGroup group = PickerUtil.mergeTableGroupConfig(mapping, t);
+                final Picker picker = new Picker(group);
+                List<Field> fields = picker.getSourceFields();
+                Assert.notEmpty(fields, "表字段映射关系不能为空:" + group.getSourceTable().getName() + " > " + group.getTargetTable().getName());
+                return new TableGroupQuartzCommand(t.getSourceTable(), fields, t.getCommand());
+            }).collect(Collectors.toList());
+            quartzListener.setCommands(quartzCommands);
         }
 
         if (listener instanceof AbstractListener) {

+ 5 - 9
dbsyncer-manager/src/main/java/org/dbsyncer/manager/impl/PreloadTemplate.java

@@ -18,6 +18,7 @@ import org.dbsyncer.parser.enums.MetaEnum;
 import org.dbsyncer.parser.impl.OperationTemplate;
 import org.dbsyncer.parser.model.ConfigModel;
 import org.dbsyncer.parser.model.Connector;
+import org.dbsyncer.parser.model.Group;
 import org.dbsyncer.parser.model.Mapping;
 import org.dbsyncer.parser.model.Meta;
 import org.dbsyncer.parser.model.OperationConfig;
@@ -175,18 +176,13 @@ public final class PreloadTemplate implements ApplicationListener<ContextRefresh
         if (null == config) {
             return;
         }
-        OperationTemplate.Group group = JsonUtil.jsonToObj(config.toString(), OperationTemplate.Group.class);
-        if (null == group) {
+        Group group = JsonUtil.jsonToObj(config.toString(), Group.class);
+        if (null == group || group.isEmpty()) {
             return;
         }
 
-        List<String> index = group.getIndex();
-        if (CollectionUtils.isEmpty(index)) {
-            return;
-        }
-
-        for (String e : index) {
-            Map m = map.get(e);
+        for (String id : group.getIndex()) {
+            Map m = map.get(id);
             ConfigModel model = (ConfigModel) commandEnum.getCommandExecutor().execute(new PreloadCommand(profileComponent, m.toString()));
             operationTemplate.execute(new OperationConfig(model, CommandEnum.OPR_ADD, commandEnum.getGroupStrategyEnum()));
             // Load tableGroups

+ 2 - 4
dbsyncer-parser/src/main/java/org/dbsyncer/parser/CacheService.java

@@ -19,13 +19,11 @@ public interface CacheService {
     Object put(String key, Object value);
 
     /**
-     * 存放K-V,不存在k则写入
+     * 获取缓存
      *
-     * @param key
-     * @param value
      * @return
      */
-    Object putIfAbsent(String key, Object value);
+    Map<String, Object> getCache();
 
     /**
      * 根据Key删除

+ 26 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/TableGroupContext.java

@@ -0,0 +1,26 @@
+/**
+ * DBSyncer Copyright 2020-2025 All Rights Reserved.
+ */
+package org.dbsyncer.parser;
+
+import org.dbsyncer.parser.model.Mapping;
+import org.dbsyncer.parser.model.TableGroup;
+import org.dbsyncer.parser.model.TableGroupPicker;
+
+import java.util.List;
+
+/**
+ * @Author 穿云
+ * @Version 1.0.0
+ * @Date 2025-01-16 23:48
+ */
+public interface TableGroupContext {
+
+    void put(Mapping mapping, List<TableGroup> tableGroups);
+
+    void update(Mapping mapping, List<TableGroup> tableGroups);
+
+    List<TableGroupPicker> getTableGroupPickers(String metaId, String tableName);
+
+    void clear(String metaId);
+}

+ 9 - 38
dbsyncer-parser/src/main/java/org/dbsyncer/parser/consumer/AbstractConsumer.java → dbsyncer-parser/src/main/java/org/dbsyncer/parser/consumer/ParserConsumer.java

@@ -7,12 +7,10 @@ import org.dbsyncer.parser.LogService;
 import org.dbsyncer.parser.LogType;
 import org.dbsyncer.parser.ProfileComponent;
 import org.dbsyncer.parser.flush.impl.BufferActuatorRouter;
-import org.dbsyncer.parser.model.Mapping;
 import org.dbsyncer.parser.model.Meta;
 import org.dbsyncer.parser.model.TableGroup;
 import org.dbsyncer.sdk.listener.ChangedEvent;
 import org.dbsyncer.sdk.listener.Watcher;
-import org.dbsyncer.sdk.listener.event.DDLChangedEvent;
 
 import java.util.List;
 import java.util.Map;
@@ -22,44 +20,25 @@ import java.util.Map;
  * @Author AE86
  * @Date 2023-11-12 01:32
  */
-public abstract class AbstractConsumer<E extends ChangedEvent> implements Watcher {
-    private BufferActuatorRouter bufferActuatorRouter;
-    private ProfileComponent profileComponent;
-    private LogService logService;
-    private String metaId;
-    protected Mapping mapping;
-    protected List<TableGroup> tableGroups;
+public final class ParserConsumer implements Watcher {
+    private final BufferActuatorRouter bufferActuatorRouter;
+    private final ProfileComponent profileComponent;
+    private final LogService logService;
+    private final String metaId;
 
-    public AbstractConsumer init(BufferActuatorRouter bufferActuatorRouter, ProfileComponent profileComponent, LogService logService, String metaId, Mapping mapping, List<TableGroup> tableGroups) {
+    public ParserConsumer(BufferActuatorRouter bufferActuatorRouter, ProfileComponent profileComponent, LogService logService, String metaId, List<TableGroup> tableGroups) {
         this.bufferActuatorRouter = bufferActuatorRouter;
         this.profileComponent = profileComponent;
         this.logService = logService;
         this.metaId = metaId;
-        this.mapping = mapping;
-        this.tableGroups = tableGroups;
-        postProcessBeforeInitialization();
-        return this;
-    }
-
-    public abstract void postProcessBeforeInitialization();
-
-    public abstract void onChange(E e);
-
-    public void onDDLChanged(DDLChangedEvent event) {
+        // 注册到路由服务中
+        bufferActuatorRouter.bind(metaId, tableGroups);
     }
 
     @Override
     public void changeEvent(ChangedEvent event) {
         event.getChangedOffset().setMetaId(metaId);
-        switch (event.getType()){
-            case ROW:
-            case SCAN:
-                onChange((E) event);
-                break;
-            case DDL:
-                onDDLChanged((DDLChangedEvent) event);
-                break;
-        }
+        bufferActuatorRouter.execute(metaId, event);
     }
 
     @Override
@@ -81,12 +60,4 @@ public abstract class AbstractConsumer<E extends ChangedEvent> implements Watche
         Meta meta = profileComponent.getMeta(metaId);
         return meta != null ? meta.getUpdateTime() : 0L;
     }
-
-    protected void bind(String tableGroupId) {
-        bufferActuatorRouter.bind(metaId, tableGroupId);
-    }
-
-    protected void execute(String tableGroupId, ChangedEvent event) {
-        bufferActuatorRouter.execute(metaId, tableGroupId, event);
-    }
 }

+ 0 - 87
dbsyncer-parser/src/main/java/org/dbsyncer/parser/consumer/impl/LogConsumer.java

@@ -1,87 +0,0 @@
-/**
- * DBSyncer Copyright 2020-2023 All Rights Reserved.
- */
-package org.dbsyncer.parser.consumer.impl;
-
-import org.dbsyncer.sdk.listener.event.CommonChangedEvent;
-import org.dbsyncer.sdk.listener.event.DDLChangedEvent;
-import org.dbsyncer.sdk.listener.event.RowChangedEvent;
-import org.dbsyncer.common.util.CollectionUtils;
-import org.dbsyncer.parser.consumer.AbstractConsumer;
-import org.dbsyncer.parser.model.FieldPicker;
-import org.dbsyncer.parser.model.TableGroup;
-import org.dbsyncer.parser.util.PickerUtil;
-import org.dbsyncer.sdk.model.Table;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.function.Consumer;
-
-/**
- * 日志消费
- *
- * @Version 1.0.0
- * @Author AE86
- * @Date 2023-11-12 02:25
- */
-public final class LogConsumer extends AbstractConsumer<RowChangedEvent> {
-    private final Map<String, List<FieldPicker>> tablePicker = new LinkedHashMap<>();
-
-    //判断上次是否为ddl,是ddl需要强制刷新下picker
-    private boolean ddlChanged;
-
-    @Override
-    public void postProcessBeforeInitialization() {
-        addTablePicker(true);
-    }
-
-    @Override
-    public void onChange(RowChangedEvent event) {
-        // 需要强制刷新 fix https://gitee.com/ghi/dbsyncer/issues/I8DJUR
-        if (ddlChanged) {
-            addTablePicker(false);
-            ddlChanged = false;
-        }
-        process(event, picker -> {
-            final Map<String, Object> changedRow = picker.getColumns(event.getDataList());
-            if (picker.filter(changedRow)) {
-                event.setChangedRow(changedRow);
-                execute(picker.getTableGroup().getId(), event);
-            }
-        });
-    }
-
-    @Override
-    public void onDDLChanged(DDLChangedEvent event) {
-        ddlChanged = true;
-        process(event, picker -> execute(picker.getTableGroup().getId(), event));
-    }
-
-    private void process(CommonChangedEvent event, Consumer<FieldPicker> consumer) {
-        // 处理过程有异常向上抛
-        List<FieldPicker> pickers = tablePicker.get(event.getSourceTableName());
-        if (!CollectionUtils.isEmpty(pickers)) {
-            // 触发刷新增量点事件
-            event.getChangedOffset().setRefreshOffset(true);
-            pickers.forEach(picker -> consumer.accept(picker));
-        }
-    }
-
-    private void addTablePicker(boolean bindBufferActuatorRouter) {
-        this.tablePicker.clear();
-        this.tableGroups.forEach(t -> {
-            final Table table = t.getSourceTable();
-            final String tableName = table.getName();
-            tablePicker.putIfAbsent(tableName, new ArrayList<>());
-            TableGroup group = PickerUtil.mergeTableGroupConfig(mapping, t);
-            tablePicker.get(tableName).add(new FieldPicker(group, group.getFilter(), table.getColumn(), group.getFieldMapping()));
-            // 是否注册到路由服务中
-            if (bindBufferActuatorRouter) {
-                bind(group.getId());
-            }
-        });
-    }
-
-}

+ 0 - 42
dbsyncer-parser/src/main/java/org/dbsyncer/parser/consumer/impl/QuartzConsumer.java

@@ -1,42 +0,0 @@
-/**
- * DBSyncer Copyright 2020-2023 All Rights Reserved.
- */
-package org.dbsyncer.parser.consumer.impl;
-
-import org.dbsyncer.sdk.listener.event.ScanChangedEvent;
-import org.dbsyncer.parser.consumer.AbstractConsumer;
-import org.dbsyncer.parser.model.FieldPicker;
-import org.dbsyncer.parser.model.TableGroup;
-import org.dbsyncer.parser.util.PickerUtil;
-
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * 定时消费
- *
- * @Version 1.0.0
- * @Author AE86
- * @Date 2023-11-12 02:18
- */
-public final class QuartzConsumer extends AbstractConsumer<ScanChangedEvent> {
-    private final List<FieldPicker> tablePicker = new LinkedList<>();
-
-    @Override
-    public void postProcessBeforeInitialization() {
-        tableGroups.forEach(t -> {
-            tablePicker.add(new FieldPicker(PickerUtil.mergeTableGroupConfig(mapping, t)));
-            bind(t.getId());
-        });
-    }
-
-    @Override
-    public void onChange(ScanChangedEvent event) {
-        final FieldPicker picker = tablePicker.get(event.getTableGroupIndex());
-        TableGroup tableGroup = picker.getTableGroup();
-        event.setSourceTableName(tableGroup.getSourceTable().getName());
-
-        // 定时暂不支持触发刷新增量点事件
-        execute(tableGroup.getId(), event);
-    }
-}

+ 4 - 5
dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/AlterStrategy.java

@@ -1,11 +1,11 @@
+/**
+ * DBSyncer Copyright 2020-2025 All Rights Reserved.
+ */
 package org.dbsyncer.parser.ddl;
 
 import net.sf.jsqlparser.statement.alter.AlterExpression;
-import org.dbsyncer.parser.model.FieldMapping;
 import org.dbsyncer.sdk.config.DDLConfig;
 
-import java.util.List;
-
 /**
  * Alter策略
  *
@@ -20,7 +20,6 @@ public interface AlterStrategy {
      *
      * @param expression
      * @param ddlConfig
-     * @param fieldMappingList
      */
-    void parse(AlterExpression expression, DDLConfig ddlConfig, List<FieldMapping> fieldMappingList);
+    void parse(AlterExpression expression, DDLConfig ddlConfig);
 }

+ 5 - 23
dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/DDLParser.java

@@ -3,32 +3,14 @@
  */
 package org.dbsyncer.parser.ddl;
 
-import org.dbsyncer.parser.model.FieldMapping;
+import net.sf.jsqlparser.JSQLParserException;
+import org.dbsyncer.parser.model.TableGroup;
 import org.dbsyncer.sdk.config.DDLConfig;
-import org.dbsyncer.sdk.model.MetaInfo;
-
-import java.util.List;
+import org.dbsyncer.sdk.spi.ConnectorService;
 
 public interface DDLParser {
 
-    /**
-     * 解析DDL配置
-     *
-     * @param sql                   源表ALTER语句
-     * @param targetConnectorType   目标连接器类型
-     * @param targetTableName       目标表
-     * @param originalFieldMappings 字段映射关系
-     * @return
-     */
-    DDLConfig parseDDlConfig(String sql, String targetConnectorType, String targetTableName, List<FieldMapping> originalFieldMappings);
+    DDLConfig parse(ConnectorService connectorService, TableGroup tableGroup, String sql) throws JSQLParserException;
 
-    /**
-     * 刷新字段映射关系(根据原来的映射关系和更改的字段进行新关系的映射组合)
-     *
-     * @param originalFieldMappings
-     * @param originMetaInfo
-     * @param targetMetaInfo
-     * @param targetDDLConfig
-     */
-    List<FieldMapping> refreshFiledMappings(List<FieldMapping> originalFieldMappings, MetaInfo originMetaInfo, MetaInfo targetMetaInfo, DDLConfig targetDDLConfig);
+    void refreshFiledMappings(TableGroup tableGroup, DDLConfig targetDDLConfig);
 }

+ 17 - 76
dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/alter/AddStrategy.java

@@ -1,95 +1,36 @@
+/**
+ * DBSyncer Copyright 2020-2025 All Rights Reserved.
+ */
 package org.dbsyncer.parser.ddl.alter;
 
 import net.sf.jsqlparser.statement.alter.AlterExpression;
-import net.sf.jsqlparser.statement.create.table.Index;
-import net.sf.jsqlparser.statement.create.table.Index.ColumnParams;
 import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.parser.ddl.AlterStrategy;
-import org.dbsyncer.parser.model.FieldMapping;
 import org.dbsyncer.sdk.config.DDLConfig;
 import org.dbsyncer.sdk.enums.DDLOperationEnum;
-import org.dbsyncer.sdk.model.Field;
-
-import java.util.LinkedList;
-import java.util.List;
 
 /**
- * 解析add的属性 exampleSql: ALTER TABLE cost ADD duan INT after(before) `tmp`;
+ * 新增字段
+ * <code>
+ *     ALTER TABLE `test`.`test_user`
+ * ADD COLUMN `aaa` varchar(255) NULL AFTER `create_date`,
+ * ADD COLUMN `bbb` varchar(255) NULL AFTER `aaa`
+ * </code>
  *
  * @author life
  */
 public class AddStrategy implements AlterStrategy {
 
     @Override
-    public void parse(AlterExpression expression, DDLConfig ddlConfig,
-            List<FieldMapping> originFiledMapping) {
+    public void parse(AlterExpression expression, DDLConfig ddlConfig) {
         if (expression.getColDataTypeList() != null) {
-            parseAddColumn(expression, ddlConfig, originFiledMapping);
-        }
-        if (expression.getIndex() != null) {
-            parseAddIndex(expression, originFiledMapping);
-        }
-        ddlConfig.setDdlOperationEnum(DDLOperationEnum.ALTER_ADD);
-    }
-
-    //解析增加列
-    //exampleSql: ALTER TABLE cost ADD duan INT after(before) `tmp`;
-    private void parseAddColumn(AlterExpression expression, DDLConfig ddlConfig,
-            List<FieldMapping> originFiledMapping) {
-        //如果是增加列
-        for (AlterExpression.ColumnDataType columnDataType : expression.getColDataTypeList()) {
-            boolean findColumn = false;
-            List<String> columnSpecs = new LinkedList<>();
-            for (String spe : columnDataType.getColumnSpecs()) {//对一before,after进行处理
-                spe = StringUtil.replace(spe, StringUtil.BACK_QUOTE, StringUtil.EMPTY);
-                spe = StringUtil.replace(spe, StringUtil.DOUBLE_QUOTATION, StringUtil.EMPTY);
-                if (findColumn) {
-                    //对before(after)字段进行映射
-                    String finalSpe = spe;
-                    FieldMapping fieldMapping = originFiledMapping.stream()
-                            .filter(x -> StringUtil.equals(x.getSource().getName(), finalSpe))
-                            .findFirst().get();
-                    columnSpecs.add(fieldMapping.getTarget().getName());
-                    findColumn = false;
-                    continue;
-                }
-
-                if (StringUtil.equalsIgnoreCase(spe, "before") || StringUtil.equalsIgnoreCase(spe,
-                        "after")) {
-                    findColumn = true;
-                }
-                columnSpecs.add(spe);
+            for (AlterExpression.ColumnDataType columnDataType : expression.getColDataTypeList()) {
+                String columName = columnDataType.getColumnName();
+                columName = StringUtil.replace(columName, StringUtil.BACK_QUOTE, StringUtil.EMPTY);
+                columName = StringUtil.replace(columName, StringUtil.DOUBLE_QUOTATION, StringUtil.EMPTY);
+                ddlConfig.getAddedFieldNames().add(columName);
             }
-            columnDataType.setColumnSpecs(columnSpecs);
-            String columName = columnDataType.getColumnName();
-            columName = StringUtil.replace(columName, StringUtil.BACK_QUOTE, StringUtil.EMPTY);
-            columName = StringUtil.replace(columName, StringUtil.DOUBLE_QUOTATION, StringUtil.EMPTY);
-            Field field = new Field(columName, columnDataType.getColDataType().getDataType(),
-                    0);//感觉不需要都行,只需要名称,后续可以自己刷新
-            ddlConfig.getAddFields().add(field);
         }
-
-    }
-
-    /**
-     * 新增索引 exampleSql: ALTER TABLE test_table add index name (tmp);
-     *
-     * @param expression
-     * @param originFiledMapping
-     */
-    private void parseAddIndex(AlterExpression expression,
-            List<FieldMapping> originFiledMapping) {
-        Index index = expression.getIndex();
-        List<ColumnParams> columnNames = index.getColumns();
-        List<ColumnParams> targetNames = new LinkedList<>();
-        for (ColumnParams columnParams : columnNames) {
-            FieldMapping fieldMapping = originFiledMapping.stream()
-                    .filter(x -> StringUtil.equals(x.getSource().getName(),
-                            columnParams.getColumnName())).findFirst().get();
-            ColumnParams target = new ColumnParams(fieldMapping.getTarget().getName(),
-                    columnParams.getParams());
-            targetNames.add(target);
-        }
-        index.setColumns(targetNames);
+        ddlConfig.setDdlOperationEnum(DDLOperationEnum.ALTER_ADD);
     }
-}
+}

+ 17 - 15
dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/alter/ChangeStrategy.java

@@ -1,36 +1,38 @@
+/**
+ * DBSyncer Copyright 2020-2025 All Rights Reserved.
+ */
 package org.dbsyncer.parser.ddl.alter;
 
 import net.sf.jsqlparser.statement.alter.AlterExpression;
 import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.parser.ddl.AlterStrategy;
-import org.dbsyncer.parser.model.FieldMapping;
 import org.dbsyncer.sdk.config.DDLConfig;
 import org.dbsyncer.sdk.enums.DDLOperationEnum;
 
-import java.util.List;
-
 /**
  * 解析change属性
- * exampleSql: ALTER TABLE test_table CHANGE duan1  duan2 INT(10)
+ * <code>
+ *     ALTER TABLE `test`.`test_user`
+ * CHANGE COLUMN `name` `name2` varchar(200) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL AFTER `id`,
+ * CHANGE COLUMN `remark` `remark2` varchar(200) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL AFTER `name2`
+ * </code>
  *
  * @author life
  */
 public class ChangeStrategy implements AlterStrategy {
 
     @Override
-    public void parse(AlterExpression expression, DDLConfig ddlConfig, List<FieldMapping> originalFieldMappings) {
-        String oldColumnName = StringUtil.replace(expression.getColumnOldName(), StringUtil.BACK_QUOTE, StringUtil.EMPTY);
-        oldColumnName = StringUtil.replace(oldColumnName, StringUtil.DOUBLE_QUOTATION, StringUtil.EMPTY);
-        ddlConfig.setSourceColumnName(oldColumnName);
-        String finalOldColumnName = oldColumnName;
-        FieldMapping fieldMapping = originalFieldMappings.stream().filter(x -> StringUtil.equals(x.getSource().getName(),
-                finalOldColumnName)).findFirst().orElse(null);
-        if (fieldMapping != null) {
-            expression.setColumnOldName(fieldMapping.getTarget().getName());
+    public void parse(AlterExpression expression, DDLConfig ddlConfig) {
+        if (expression.getColDataTypeList() != null) {
             for (AlterExpression.ColumnDataType columnDataType : expression.getColDataTypeList()) {
-                ddlConfig.setChangedColumnName(columnDataType.getColumnName());
+                String oldColumnName = StringUtil.replace(expression.getColumnOldName(), StringUtil.BACK_QUOTE, StringUtil.EMPTY);
+                oldColumnName = StringUtil.replace(oldColumnName, StringUtil.DOUBLE_QUOTATION, StringUtil.EMPTY);
+
+                String changedColumnName = StringUtil.replace(columnDataType.getColumnName(), StringUtil.BACK_QUOTE, StringUtil.EMPTY);
+                changedColumnName = StringUtil.replace(changedColumnName, StringUtil.DOUBLE_QUOTATION, StringUtil.EMPTY);
+                ddlConfig.getChangedFieldNames().put(oldColumnName, changedColumnName);
             }
         }
         ddlConfig.setDdlOperationEnum(DDLOperationEnum.ALTER_CHANGE);
     }
-}
+}

+ 12 - 50
dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/alter/DropStrategy.java

@@ -1,71 +1,33 @@
+/**
+ * DBSyncer Copyright 2020-2025 All Rights Reserved.
+ */
 package org.dbsyncer.parser.ddl.alter;
 
 import net.sf.jsqlparser.statement.alter.AlterExpression;
 import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.parser.ddl.AlterStrategy;
-import org.dbsyncer.parser.model.FieldMapping;
 import org.dbsyncer.sdk.config.DDLConfig;
 import org.dbsyncer.sdk.enums.DDLOperationEnum;
-import org.dbsyncer.sdk.model.Field;
-
-import java.util.List;
 
 /**
  * 解析drop
+ * <code>
+ *     ALTER TABLE `test`.`test_user`
+ * DROP COLUMN `aaa`,
+ * DROP COLUMN `bbb`
+ * </code>
  *
  * @author life
  */
 public class DropStrategy implements AlterStrategy {
 
     @Override
-    public void parse(AlterExpression expression, DDLConfig ddlConfig, List<FieldMapping> originalFieldMappings) {
+    public void parse(AlterExpression expression, DDLConfig ddlConfig) {
         if (expression.getColumnName() != null) {
-            dropColumn(expression, ddlConfig, originalFieldMappings);
-        }
-        if (expression.getIndex() != null) {
-            dropIndex(expression, originalFieldMappings);
+            String columnName = StringUtil.replace(expression.getColumnName(), StringUtil.BACK_QUOTE, StringUtil.EMPTY);
+            columnName = StringUtil.replace(columnName, StringUtil.DOUBLE_QUOTATION, StringUtil.EMPTY);
+            ddlConfig.getDroppedFieldNames().add(columnName);
         }
         ddlConfig.setDdlOperationEnum(DDLOperationEnum.ALTER_DROP);
     }
-
-    /**
-     * example: ALTER TABLE test_table DROP dis;
-     *
-     * @param expression
-     * @param ddlConfig
-     * @param originalFieldMappings
-     */
-    private void dropColumn(AlterExpression expression, DDLConfig ddlConfig, List<FieldMapping> originalFieldMappings) {
-        String columnName = StringUtil.replace(expression.getColumnName(), StringUtil.BACK_QUOTE, StringUtil.EMPTY);
-        columnName = StringUtil.replace(columnName,StringUtil.DOUBLE_QUOTATION,StringUtil.EMPTY);
-        Field field = new Field(columnName, null, 0);
-        //需要把列替换成目标的列名
-        String finalColumnName = columnName;
-        originalFieldMappings.stream()
-                .filter(x -> StringUtil.equals(x.getSource().getName(), finalColumnName)).findFirst()
-                .ifPresent(fieldMapping -> expression.setColumnName(fieldMapping.getTarget().getName()));
-        //加入还是原名
-        ddlConfig.getRemoveFields().add(field);
-    }
-
-    /**
-     * 貌似不需要做什么,我们目前没有字段分索引,再考虑
-     * example: ALTER TABLE test_table drop index name;
-     *
-     * @param expression
-     * @param originalFieldMappings
-     */
-    private void dropIndex(AlterExpression expression, List<FieldMapping> originalFieldMappings) {
-//        Index index = expression.getIndex();
-//        String names= index.getName();
-//        String[] nameList = StringUtil.split(names,".");
-//        List<String> targetNameList = new LinkedList<>();
-//        for (String name:nameList) {
-//            FieldMapping fieldMapping = originalFieldMappings.stream()
-//                    .filter(x -> StringUtil.equals(x.getSource().getName(),
-//                            name)).findFirst().get();
-//            targetNameList.add(fieldMapping.getTarget().getName());
-//        }
-//        index.setName(targetNameList);
-    }
 }

+ 16 - 20
dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/alter/ModifyStrategy.java

@@ -1,39 +1,35 @@
+/**
+ * DBSyncer Copyright 2020-2025 All Rights Reserved.
+ */
 package org.dbsyncer.parser.ddl.alter;
 
 import net.sf.jsqlparser.statement.alter.AlterExpression;
 import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.parser.ddl.AlterStrategy;
-import org.dbsyncer.parser.model.FieldMapping;
 import org.dbsyncer.sdk.config.DDLConfig;
 import org.dbsyncer.sdk.enums.DDLOperationEnum;
 
-import java.util.List;
-
 /**
- * 解析modify的属性
- * exampleSql: ALTER TABLE `test`.`test_table` MODIFY COLUMN `test` varchar(251) NULL DEFAULT NULL
- * alter modify parser
+ * 字段属性变更
+ * <code>
+ * ALTER TABLE `test`.`test_user`
+ * MODIFY COLUMN `name` varchar(203) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL AFTER `id`,
+ * MODIFY COLUMN `remark` varchar(204) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL AFTER `name`
+ * </code>
  *
  * @author life
  */
 public class ModifyStrategy implements AlterStrategy {
 
     @Override
-    public void parse(AlterExpression expression, DDLConfig ddlConfig, List<FieldMapping> originalFieldMappings) {
-        //先查找到当前的表和目标的表对应的字段
-        for (AlterExpression.ColumnDataType columnDataType : expression.getColDataTypeList()) {
-            String columnName = StringUtil.replace(columnDataType.getColumnName(), StringUtil.BACK_QUOTE, StringUtil.EMPTY);
-            columnName =StringUtil.replace(columnName,StringUtil.DOUBLE_QUOTATION,StringUtil.EMPTY);
-            for (FieldMapping fieldMapping : originalFieldMappings) {
-                if (StringUtil.equals(fieldMapping.getSource().getName(), columnName)) {
-                    //TODO life 找到目标的表名,先是alter进行属性替换,然后config记录新的
-                    columnDataType.setColumnName(fieldMapping.getTarget().getName());
-                    //因为只是修改属性,所以表名称没有变化
-                    ddlConfig.setSourceColumnName(fieldMapping.getSource().getName());
-                    ddlConfig.setChangedColumnName(fieldMapping.getSource().getName());
-                }
+    public void parse(AlterExpression expression, DDLConfig ddlConfig) {
+        if (expression.getColDataTypeList() != null) {
+            for (AlterExpression.ColumnDataType columnDataType : expression.getColDataTypeList()) {
+                String columnName = StringUtil.replace(columnDataType.getColumnName(), StringUtil.BACK_QUOTE, StringUtil.EMPTY);
+                columnName = StringUtil.replace(columnName, StringUtil.DOUBLE_QUOTATION, StringUtil.EMPTY);
+                ddlConfig.getModifiedFieldNames().add(columnName);
             }
         }
         ddlConfig.setDdlOperationEnum(DDLOperationEnum.ALTER_MODIFY);
     }
-}
+}

+ 119 - 68
dbsyncer-parser/src/main/java/org/dbsyncer/parser/ddl/impl/DDLParserImpl.java

@@ -9,10 +9,9 @@ import net.sf.jsqlparser.statement.Statement;
 import net.sf.jsqlparser.statement.alter.Alter;
 import net.sf.jsqlparser.statement.alter.AlterExpression;
 import net.sf.jsqlparser.statement.alter.AlterOperation;
+import org.dbsyncer.common.util.CollectionUtils;
+import org.dbsyncer.common.util.JsonUtil;
 import org.dbsyncer.common.util.StringUtil;
-import org.dbsyncer.connector.base.ConnectorFactory;
-import org.dbsyncer.sdk.config.DDLConfig;
-import org.dbsyncer.sdk.connector.database.Database;
 import org.dbsyncer.parser.ddl.AlterStrategy;
 import org.dbsyncer.parser.ddl.DDLParser;
 import org.dbsyncer.parser.ddl.alter.AddStrategy;
@@ -20,27 +19,26 @@ import org.dbsyncer.parser.ddl.alter.ChangeStrategy;
 import org.dbsyncer.parser.ddl.alter.DropStrategy;
 import org.dbsyncer.parser.ddl.alter.ModifyStrategy;
 import org.dbsyncer.parser.model.FieldMapping;
+import org.dbsyncer.parser.model.TableGroup;
+import org.dbsyncer.sdk.config.DDLConfig;
+import org.dbsyncer.sdk.connector.database.Database;
 import org.dbsyncer.sdk.enums.DDLOperationEnum;
 import org.dbsyncer.sdk.model.Field;
-import org.dbsyncer.sdk.model.MetaInfo;
 import org.dbsyncer.sdk.spi.ConnectorService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.stereotype.Component;
 
 import javax.annotation.PostConstruct;
-import javax.annotation.Resource;
-import java.util.LinkedList;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.stream.Collectors;
 
 /**
- * alter情况
- * <ol>
- *     <li>只是修改字段的属性值</li>
- *     <li>修改字段的名称</li>
- * </ol>
+ * ddl解析器, 支持类型参考:{@link DDLOperationEnum}
  *
  * @author life
  * @version 1.0.0
@@ -50,10 +48,6 @@ import java.util.concurrent.ConcurrentHashMap;
 public class DDLParserImpl implements DDLParser {
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
-
-    @Resource
-    private ConnectorFactory connectorFactory;
-
     private final Map<AlterOperation, AlterStrategy> STRATEGIES = new ConcurrentHashMap();
 
     @PostConstruct
@@ -65,77 +59,134 @@ public class DDLParserImpl implements DDLParser {
     }
 
     @Override
-    public DDLConfig parseDDlConfig(String sql, String targetConnectorType, String targetTableName, List<FieldMapping> originalFieldMappings) {
-        ConnectorService connectorService = connectorFactory.getConnectorService(targetConnectorType);
-        // 替换为目标库执行SQL
+    public DDLConfig parse(ConnectorService connectorService, TableGroup tableGroup, String sql) throws JSQLParserException {
         DDLConfig ddlConfig = new DDLConfig();
-        try {
-            Statement statement = CCJSqlParserUtil.parse(sql);
-            if (statement instanceof Alter) {
-                Alter alter = (Alter) statement;
-                Database database = (Database) connectorService;
-                String quotation = database.buildSqlWithQuotation();
-                // 替换成目标表名
-                alter.getTable().setName(new StringBuilder(quotation).append(targetTableName).append(quotation).toString());
-                ddlConfig.setSql(alter.toString());
-                for (AlterExpression expression : alter.getAlterExpressions()) {
-                    if (STRATEGIES.containsKey(expression.getOperation())) {
-                        STRATEGIES.get(expression.getOperation()).parse(expression, ddlConfig, originalFieldMappings);
-                    }
-                }
-                ddlConfig.setSql(alter.toString());
+        logger.info("ddl:{}", sql);
+        Statement statement = CCJSqlParserUtil.parse(sql);
+        if (statement instanceof Alter && connectorService instanceof Database) {
+            Alter alter = (Alter) statement;
+            Database database = (Database) connectorService;
+            String quotation = database.buildSqlWithQuotation();
+            // 替换成目标表名
+            alter.getTable().setName(quotation + tableGroup.getTargetTable().getName() + quotation);
+            ddlConfig.setSql(alter.toString());
+            for (AlterExpression expression : alter.getAlterExpressions()) {
+                STRATEGIES.computeIfPresent(expression.getOperation(), (k, strategy) -> {
+                    strategy.parse(expression, ddlConfig);
+                    return strategy;
+                });
             }
-        } catch (JSQLParserException e) {
-            logger.error(e.getMessage(), e);
         }
         return ddlConfig;
     }
 
     @Override
-    public List<FieldMapping> refreshFiledMappings(List<FieldMapping> originalFieldMappings, MetaInfo originMetaInfo, MetaInfo targetMetaInfo, DDLConfig targetDDLConfig) {
-        List<FieldMapping> newTargetMappingList = new LinkedList<>();
-        //处理映射关系
-        for (FieldMapping fieldMapping : originalFieldMappings) {
-            String fieldSourceName = fieldMapping.getSource().getName();
-            String filedTargetName = fieldMapping.getTarget().getName();
-            //找到更改的源表的名称,也就是找到了对应的映射关系,这样就可以从源表找到更改后的名称进行对应,
-            if (fieldSourceName.equals(targetDDLConfig.getSourceColumnName())) {
-                // 说明字段名没有改变,只是改变了属性
-                if (targetDDLConfig.getDdlOperationEnum() == DDLOperationEnum.ALTER_MODIFY) {
-                    Field source = originMetaInfo.getColumn().stream().filter(x -> StringUtil.equals(x.getName(), fieldSourceName)).findFirst().get();
-                    Field target = targetMetaInfo.getColumn().stream().filter(x -> StringUtil.equals(x.getName(), filedTargetName)).findFirst().get();
-                    //替换
-                    newTargetMappingList.add(new FieldMapping(source, target));
+    public void refreshFiledMappings(TableGroup tableGroup, DDLConfig targetDDLConfig) {
+        switch (targetDDLConfig.getDdlOperationEnum()) {
+            case ALTER_MODIFY:
+                updateFieldMapping(tableGroup, targetDDLConfig.getModifiedFieldNames());
+                break;
+            case ALTER_ADD:
+                appendFieldMappings(tableGroup, targetDDLConfig.getAddedFieldNames());
+                break;
+            case ALTER_CHANGE:
+                renameFieldMapping(tableGroup, targetDDLConfig.getChangedFieldNames());
+                break;
+            case ALTER_DROP:
+                removeFieldMappings(tableGroup, targetDDLConfig.getDroppedFieldNames());
+                break;
+            default:
+                break;
+        }
+    }
+
+    private void updateFieldMapping(TableGroup tableGroup, List<String> modifiedFieldNames) {
+        Map<String, Field> sourceFiledMap = tableGroup.getSourceTable().getColumn().stream().collect(Collectors.toMap(Field::getName, filed -> filed));
+        Map<String, Field> targetFiledMap = tableGroup.getTargetTable().getColumn().stream().collect(Collectors.toMap(Field::getName, filed -> filed));
+        for (FieldMapping fieldMapping : tableGroup.getFieldMapping()) {
+            Field source = fieldMapping.getSource();
+            Field target = fieldMapping.getTarget();
+            // 支持1对多场景
+            if (source != null) {
+                String modifiedName = source.getName();
+                if (!modifiedFieldNames.contains(modifiedName)) {
                     continue;
-                } else if (targetDDLConfig.getDdlOperationEnum() == DDLOperationEnum.ALTER_CHANGE) {
-                    Field source = originMetaInfo.getColumn().stream().filter(x -> StringUtil.equals(x.getName(), targetDDLConfig.getChangedColumnName())).findFirst().get();
-                    Field target = targetMetaInfo.getColumn().stream().filter(x -> StringUtil.equals(x.getName(), targetDDLConfig.getChangedColumnName())).findFirst().get();
-                    //替换
-                    newTargetMappingList.add(new FieldMapping(source, target));
+                }
+                sourceFiledMap.computeIfPresent(modifiedName, (k, field) -> {
+                    fieldMapping.setSource(field);
+                    return field;
+                });
+                if (target != null && StringUtil.equals(modifiedName, target.getName())) {
+                    targetFiledMap.computeIfPresent(modifiedName, (k, field) -> {
+                        fieldMapping.setTarget(field);
+                        return field;
+                    });
+                }
+            }
+        }
+    }
+
+    private void renameFieldMapping(TableGroup tableGroup, Map<String, String> changedFieldNames) {
+        Set<String> oldNames = changedFieldNames.keySet();
+        for (FieldMapping fieldMapping : tableGroup.getFieldMapping()) {
+            Field source = fieldMapping.getSource();
+            Field target = fieldMapping.getTarget();
+            // 支持1对多场景
+            if (source != null) {
+                String oldFieldName = source.getName();
+                if (!oldNames.contains(oldFieldName)) {
                     continue;
                 }
+                changedFieldNames.computeIfPresent(oldFieldName, (k, newName) -> {
+                    source.setName(newName);
+                    if (target != null && StringUtil.equals(oldFieldName, target.getName())) {
+                        target.setName(newName);
+                    }
+                    return newName;
+                });
             }
-            newTargetMappingList.add(fieldMapping);
         }
+    }
 
-        if (DDLOperationEnum.ALTER_ADD == targetDDLConfig.getDdlOperationEnum()) {
-            //处理新增的映射关系
-            List<Field> addFields = targetDDLConfig.getAddFields();
-            for (Field field : addFields) {
-                Field source = originMetaInfo.getColumn().stream().filter(x -> StringUtil.equals(x.getName(), field.getName())).findFirst().get();
-                Field target = targetMetaInfo.getColumn().stream().filter(x -> StringUtil.equals(x.getName(), field.getName())).findFirst().get();
-                newTargetMappingList.add(new FieldMapping(source, target));
+    private void removeFieldMappings(TableGroup tableGroup, List<String> removeFieldNames) {
+        Iterator<FieldMapping> iterator = tableGroup.getFieldMapping().iterator();
+        while (iterator.hasNext()) {
+            FieldMapping fieldMapping = iterator.next();
+            Field source = fieldMapping.getSource();
+            if (source != null && removeFieldNames.contains(source.getName())) {
+                iterator.remove();
             }
         }
+    }
 
-        if (DDLOperationEnum.ALTER_DROP == targetDDLConfig.getDdlOperationEnum()) {
-            //处理删除字段的映射关系
-            List<Field> removeFields = targetDDLConfig.getRemoveFields();
-            for (Field field : removeFields) {
-                newTargetMappingList.removeIf(x -> StringUtil.equals(x.getSource().getName(), field.getName()));
+    private void appendFieldMappings(TableGroup tableGroup, List<String> addedFieldNames) {
+        List<FieldMapping> fieldMappings = tableGroup.getFieldMapping();
+        Iterator<String> iterator = addedFieldNames.iterator();
+        while (iterator.hasNext()) {
+            String name = iterator.next();
+            for (FieldMapping fieldMapping : fieldMappings) {
+                Field source = fieldMapping.getSource();
+                Field target = fieldMapping.getTarget();
+                // 检查重复字段
+                if (source != null && target != null && StringUtil.equals(source.getName(), name) && StringUtil.equals(target.getName(), name)) {
+                    iterator.remove();
+                }
             }
         }
-        return newTargetMappingList;
+        if (CollectionUtils.isEmpty(addedFieldNames)) {
+            return;
+        }
+
+        Map<String, Field> sourceFiledMap = tableGroup.getSourceTable().getColumn().stream().collect(Collectors.toMap(Field::getName, filed -> filed));
+        Map<String, Field> targetFiledMap = tableGroup.getTargetTable().getColumn().stream().collect(Collectors.toMap(Field::getName, filed -> filed));
+        if (CollectionUtils.isEmpty(sourceFiledMap) || CollectionUtils.isEmpty(targetFiledMap)) {
+            return;
+        }
+        addedFieldNames.forEach(newFieldName -> {
+            if (sourceFiledMap.containsKey(newFieldName) && targetFiledMap.containsKey(newFieldName)) {
+                fieldMappings.add(new FieldMapping(sourceFiledMap.get(newFieldName), targetFiledMap.get(newFieldName)));
+            }
+        });
     }
 
 }

+ 0 - 9
dbsyncer-parser/src/main/java/org/dbsyncer/parser/enums/MetaEnum.java

@@ -32,15 +32,6 @@ public enum MetaEnum {
         this.message = message;
     }
 
-    public static MetaEnum getMetaEnum(int code) throws ParserException {
-        for (MetaEnum e : MetaEnum.values()) {
-            if (code == e.getCode()) {
-                return e;
-            }
-        }
-        throw new ParserException(String.format("Meta code \"%s\" does not exist.", code));
-    }
-
     public static boolean isRunning(int state) {
         return RUNNING.getCode() == state || STOPPING.getCode() == state;
     }

+ 5 - 5
dbsyncer-parser/src/main/java/org/dbsyncer/parser/event/RefreshOffsetEvent.java

@@ -15,7 +15,7 @@ import java.util.List;
  */
 public final class RefreshOffsetEvent extends ApplicationContextEvent {
 
-    private List<ChangedOffset> offsetList;
+    private final ChangedOffset changedOffset;
 
     /**
      * Create a new ContextStartedEvent.
@@ -23,12 +23,12 @@ public final class RefreshOffsetEvent extends ApplicationContextEvent {
      * @param source the {@code ApplicationContext} that the event is raised for
      *               (must not be {@code null})
      */
-    public RefreshOffsetEvent(ApplicationContext source, List<ChangedOffset> offsetList) {
+    public RefreshOffsetEvent(ApplicationContext source, ChangedOffset changedOffset) {
         super(source);
-        this.offsetList = offsetList;
+        this.changedOffset = changedOffset;
     }
 
-    public List<ChangedOffset> getOffsetList() {
-        return offsetList;
+    public ChangedOffset getChangedOffset() {
+        return changedOffset;
     }
 }

+ 19 - 10
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/AbstractBufferActuator.java

@@ -7,6 +7,7 @@ import org.dbsyncer.common.config.BufferActuatorConfig;
 import org.dbsyncer.common.metric.TimeRegistry;
 import org.dbsyncer.common.scheduled.ScheduledTaskJob;
 import org.dbsyncer.common.scheduled.ScheduledTaskService;
+import org.dbsyncer.parser.ParserException;
 import org.dbsyncer.parser.ProfileComponent;
 import org.dbsyncer.parser.enums.MetaEnum;
 import org.dbsyncer.parser.model.Meta;
@@ -17,11 +18,12 @@ import org.springframework.util.Assert;
 import javax.annotation.Resource;
 import java.lang.reflect.ParameterizedType;
 import java.time.Instant;
-import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Queue;
 import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
@@ -80,7 +82,7 @@ public abstract class AbstractBufferActuator<Request extends BufferRequest, Resp
      */
     protected void buildQueueConfig() {
         taskLock = new ReentrantLock();
-        this.queue = new LinkedBlockingQueue(config.getBufferQueueCapacity());
+        this.queue = new LinkedBlockingQueue<>(config.getBufferQueueCapacity());
         logger.info("{} initialized with queue capacity: {}", this.getClass().getSimpleName(), config.getBufferQueueCapacity());
     }
 
@@ -106,7 +108,7 @@ public abstract class AbstractBufferActuator<Request extends BufferRequest, Resp
      * @param request
      * @return
      */
-    protected boolean isRunning(BufferRequest request) {
+    public boolean isRunning(BufferRequest request) {
         Meta meta = profileComponent.getMeta(request.getMetaId());
         return meta != null && MetaEnum.isRunning(meta.getState());
     }
@@ -178,7 +180,7 @@ public abstract class AbstractBufferActuator<Request extends BufferRequest, Resp
     public void run() {
         boolean locked = false;
         try {
-            locked = taskLock.tryLock();
+            locked = taskLock.tryLock(3, TimeUnit.SECONDS);
             if (locked) {
                 submit();
             }
@@ -201,20 +203,26 @@ public abstract class AbstractBufferActuator<Request extends BufferRequest, Resp
         return config.getBufferQueueCapacity();
     }
 
-    private void submit() throws IllegalAccessException, InstantiationException {
+    private void submit() {
         if (queue.isEmpty()) {
             return;
         }
 
         AtomicLong batchCounter = new AtomicLong();
-        Map<String, Response> map = new LinkedHashMap<>();
+        Map<String, Response> map = new ConcurrentHashMap<>();
         while (!queue.isEmpty() && batchCounter.get() < config.getBufferPullCount()) {
             Request poll = queue.poll();
             String key = getPartitionKey(poll);
-            if (!map.containsKey(key)) {
-                map.putIfAbsent(key, responseClazz.newInstance());
-            }
-            Response response = map.get(key);
+            Response response = map.compute(key, (k,v) -> {
+                if (v == null) {
+                    try {
+                        return responseClazz.newInstance();
+                    } catch (Exception e) {
+                        throw new ParserException(e);
+                    }
+                }
+                return v;
+            });
             partition(poll, response);
             batchCounter.incrementAndGet();
 
@@ -234,4 +242,5 @@ public abstract class AbstractBufferActuator<Request extends BufferRequest, Resp
     public void setConfig(BufferActuatorConfig config) {
         this.config = config;
     }
+
 }

+ 60 - 27
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/BufferActuatorRouter.java

@@ -4,9 +4,11 @@
 package org.dbsyncer.parser.flush.impl;
 
 import org.dbsyncer.common.config.TableGroupBufferConfig;
-import org.dbsyncer.sdk.listener.ChangedEvent;
-import org.dbsyncer.parser.flush.BufferActuator;
+import org.dbsyncer.parser.flush.AbstractBufferActuator;
+import org.dbsyncer.parser.model.TableGroup;
 import org.dbsyncer.parser.model.WriterRequest;
+import org.dbsyncer.sdk.enums.ChangedEventTypeEnum;
+import org.dbsyncer.sdk.listener.ChangedEvent;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.DisposableBean;
@@ -14,8 +16,10 @@ import org.springframework.stereotype.Component;
 
 import javax.annotation.Resource;
 import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
 /**
@@ -37,47 +41,76 @@ public final class BufferActuatorRouter implements DisposableBean {
     private TableGroupBufferActuator tableGroupBufferActuator;
 
     @Resource
-    private BufferActuator generalBufferActuator;
+    private GeneralBufferActuator generalBufferActuator;
 
     /**
      * 驱动缓存执行路由列表
      */
     private final Map<String, Map<String, TableGroupBufferActuator>> router = new ConcurrentHashMap<>();
 
-    public void execute(String metaId, String tableGroupId, ChangedEvent event) {
-        if (router.containsKey(metaId) && router.get(metaId).containsKey(tableGroupId)) {
-            router.get(metaId).get(tableGroupId).offer(new WriterRequest(tableGroupId, event));
+    public void execute(String metaId, ChangedEvent event) {
+        if (router.containsKey(metaId)) {
+            router.computeIfPresent(metaId, (k, processor) -> {
+                processor.computeIfPresent(event.getSourceTableName(), (x, actuator) -> {
+                    offer(actuator, event);
+                    return actuator;
+                });
+                return processor;
+            });
             return;
         }
-        generalBufferActuator.offer(new WriterRequest(tableGroupId, event));
+        offer(generalBufferActuator, event);
     }
 
-    public void bind(String metaId, String tableGroupId) {
-        router.computeIfAbsent(metaId, k -> new ConcurrentHashMap<>());
-
-        // TODO 暂定执行器上限,待替换为LRU模型
-        if (router.get(metaId).size() >= tableGroupBufferConfig.getMaxBufferActuatorSize()) {
-            return;
-        }
-
-        router.get(metaId).computeIfAbsent(tableGroupId, k -> {
-            TableGroupBufferActuator newBufferActuator = null;
-            try {
-                newBufferActuator = (TableGroupBufferActuator) tableGroupBufferActuator.clone();
-                newBufferActuator.setTableGroupId(tableGroupId);
-                newBufferActuator.buildConfig();
-            } catch (CloneNotSupportedException ex) {
-                logger.error(ex.getMessage(), ex);
+    public void bind(String metaId, List<TableGroup> tableGroups) {
+        router.computeIfAbsent(metaId, k -> {
+            Map<String, TableGroupBufferActuator> processor = new ConcurrentHashMap<>();
+            for (TableGroup tableGroup : tableGroups) {
+                // 超过执行器上限
+                if (processor.size() >= tableGroupBufferConfig.getMaxBufferActuatorSize()) {
+                    break;
+                }
+                final String tableName = tableGroup.getSourceTable().getName();
+                processor.computeIfAbsent(tableName, name -> {
+                    TableGroupBufferActuator newBufferActuator = null;
+                    try {
+                        newBufferActuator = (TableGroupBufferActuator) tableGroupBufferActuator.clone();
+                        newBufferActuator.setTableName(name);
+                        newBufferActuator.buildConfig();
+                    } catch (CloneNotSupportedException ex) {
+                        logger.error(ex.getMessage(), ex);
+                    }
+                    return newBufferActuator;
+                });
             }
-            return newBufferActuator;
+            return processor;
         });
     }
 
     public void unbind(String metaId) {
-        if (router.containsKey(metaId)) {
-            router.get(metaId).values().forEach(TableGroupBufferActuator::stop);
-            router.remove(metaId);
+        router.computeIfPresent(metaId, (k, processor) -> {
+            processor.values().forEach(TableGroupBufferActuator::stop);
+            return null;
+        });
+    }
+
+    private void offer(AbstractBufferActuator actuator, ChangedEvent event) {
+        if (ChangedEventTypeEnum.isDDL(event.getType())) {
+            WriterRequest request = new WriterRequest(event);
+            // DDL事件,阻塞等待队列消费完成
+            while (actuator.isRunning(request)) {
+                if (actuator.getQueue().isEmpty()) {
+                    actuator.offer(request);
+                    return;
+                }
+                try {
+                    TimeUnit.MILLISECONDS.sleep(10);
+                } catch (InterruptedException ex) {
+                    logger.error(ex.getMessage(), ex);
+                }
+            }
         }
+        actuator.offer(new WriterRequest(event));
     }
 
     @Override

+ 118 - 92
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/GeneralBufferActuator.java

@@ -12,13 +12,20 @@ import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.connector.base.ConnectorFactory;
 import org.dbsyncer.parser.ParserComponent;
 import org.dbsyncer.parser.ProfileComponent;
+import org.dbsyncer.parser.TableGroupContext;
 import org.dbsyncer.parser.ddl.DDLParser;
 import org.dbsyncer.parser.event.RefreshOffsetEvent;
 import org.dbsyncer.parser.flush.AbstractBufferActuator;
-import org.dbsyncer.parser.model.*;
+import org.dbsyncer.parser.model.Connector;
+import org.dbsyncer.parser.model.FieldMapping;
+import org.dbsyncer.parser.model.Mapping;
+import org.dbsyncer.parser.model.Meta;
+import org.dbsyncer.parser.model.TableGroup;
+import org.dbsyncer.parser.model.TableGroupPicker;
+import org.dbsyncer.parser.model.WriterRequest;
+import org.dbsyncer.parser.model.WriterResponse;
 import org.dbsyncer.parser.strategy.FlushStrategy;
 import org.dbsyncer.parser.util.ConvertUtil;
-import org.dbsyncer.parser.util.PickerUtil;
 import org.dbsyncer.plugin.PluginFactory;
 import org.dbsyncer.plugin.enums.ProcessEnum;
 import org.dbsyncer.plugin.impl.IncrementPluginContext;
@@ -26,7 +33,10 @@ import org.dbsyncer.sdk.config.DDLConfig;
 import org.dbsyncer.sdk.connector.ConnectorInstance;
 import org.dbsyncer.sdk.enums.ChangedEventTypeEnum;
 import org.dbsyncer.sdk.model.ConnectorConfig;
+import org.dbsyncer.sdk.model.Field;
 import org.dbsyncer.sdk.model.MetaInfo;
+import org.dbsyncer.sdk.schema.SchemaResolver;
+import org.dbsyncer.sdk.spi.ConnectorService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.context.ApplicationContext;
@@ -35,10 +45,12 @@ import org.springframework.util.Assert;
 
 import javax.annotation.PostConstruct;
 import javax.annotation.Resource;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.Executor;
+import java.util.stream.Collectors;
 
 /**
  * 通用执行器(单线程消费,多线程批量写,按序执行)
@@ -65,7 +77,7 @@ public class GeneralBufferActuator extends AbstractBufferActuator<WriterRequest,
     private ParserComponent parserComponent;
 
     @Resource
-    protected ProfileComponent profileComponent;
+    private ProfileComponent profileComponent;
 
     @Resource
     private PluginFactory pluginFactory;
@@ -79,6 +91,9 @@ public class GeneralBufferActuator extends AbstractBufferActuator<WriterRequest,
     @Resource
     private DDLParser ddlParser;
 
+    @Resource
+    private TableGroupContext tableGroupContext;
+
     @PostConstruct
     public void init() {
         setConfig(generalBufferConfig);
@@ -87,7 +102,7 @@ public class GeneralBufferActuator extends AbstractBufferActuator<WriterRequest,
 
     @Override
     protected String getPartitionKey(WriterRequest request) {
-        return request.getTableGroupId();
+        return request.getTableName();
     }
 
     @Override
@@ -96,10 +111,10 @@ public class GeneralBufferActuator extends AbstractBufferActuator<WriterRequest,
             response.addData(request.getRow());
         }
         if (request.getChangedOffset() != null) {
-            response.addChangedOffset(request.getChangedOffset());
+            response.setChangedOffset(request.getChangedOffset());
         }
         if (!response.isMerged()) {
-            response.setTableGroupId(request.getTableGroupId());
+            response.setTableName(request.getTableName());
             response.setEvent(request.getEvent());
             response.setTypeEnum(request.getTypeEnum());
             response.setSql(request.getSql());
@@ -116,54 +131,32 @@ public class GeneralBufferActuator extends AbstractBufferActuator<WriterRequest,
 
     @Override
     public void pull(WriterResponse response) {
-        // 0、获取配置信息
-        final TableGroup tableGroup = getTableGroup(response.getTableGroupId());
-        final Mapping mapping = profileComponent.getMapping(tableGroup.getMappingId());
-        final TableGroup group = PickerUtil.mergeTableGroupConfig(mapping, tableGroup);
-
-        // 1、ddl解析
-        if (ChangedEventTypeEnum.isDDL(response.getTypeEnum())) {
-            parseDDl(response, mapping, group);
+        Meta meta = profileComponent.getMeta(response.getChangedOffset().getMetaId());
+        if (meta == null) {
             return;
         }
-
-        final Picker picker = new Picker(group.getFieldMapping());
-        final List<Map> sourceDataList = response.getDataList();
-        // 2、映射字段
-        List<Map> targetDataList = picker.pickTargetData(sourceDataList);
-
-        // 3、参数转换
-        ConvertUtil.convert(group.getConvert(), targetDataList);
-
-        // 4、插件转换
-        final IncrementPluginContext context = new IncrementPluginContext();
-        context.setSourceConnectorInstance(connectorFactory.connect(getConnectorConfig(mapping.getSourceConnectorId())));
-        context.setTargetConnectorInstance(connectorFactory.connect(getConnectorConfig(mapping.getTargetConnectorId())));
-        context.setSourceTableName(group.getSourceTable().getName());
-        context.setTargetTableName(group.getTargetTable().getName());
-        context.setEvent(response.getEvent());
-        context.setTargetFields(picker.getTargetFields());
-        context.setCommand(group.getCommand());
-        context.setBatchSize(generalBufferConfig.getBufferWriterCount());
-        context.setSourceList(sourceDataList);
-        context.setTargetList(targetDataList);
-        context.setPluginExtInfo(group.getPluginExtInfo());
-        context.setForceUpdate(mapping.isForceUpdate());
-        pluginFactory.process(group.getPlugin(), context, ProcessEnum.CONVERT);
-
-        // 5、批量执行同步
-        Result result = parserComponent.writeBatch(context, getExecutor());
-
-        // 6.发布刷新增量点事件
-        applicationContext.publishEvent(new RefreshOffsetEvent(applicationContext, response.getOffsetList()));
-
-        // 7、持久化同步结果
-        result.setTableGroupId(tableGroup.getId());
-        result.setTargetTableGroupName(context.getTargetTableName());
-        flushStrategy.flushIncrementData(mapping.getMetaId(), result, response.getEvent());
-
-        // 8、执行批量处理后的
-        pluginFactory.process(group.getPlugin(), context, ProcessEnum.AFTER);
+        final Mapping mapping = profileComponent.getMapping(meta.getMappingId());
+        List<TableGroupPicker> pickers = tableGroupContext.getTableGroupPickers(meta.getId(), response.getTableName());
+
+        switch (response.getTypeEnum()) {
+            case DDL:
+                tableGroupContext.update(mapping, pickers.stream().map(picker -> {
+                    TableGroup tableGroup = profileComponent.getTableGroup(picker.getTableGroup().getId());
+                    parseDDl(response, mapping, tableGroup);
+                    return tableGroup;
+                }).collect(Collectors.toList()));
+                break;
+            case SCAN:
+                pickers.forEach(picker -> distributeTableGroup(response, mapping, picker, picker.getSourceFields(), false));
+                break;
+            case ROW:
+                pickers.forEach(picker -> distributeTableGroup(response, mapping, picker, picker.getTableGroup().getSourceTable().getColumn(), true));
+                // 发布刷新增量点事件
+                applicationContext.publishEvent(new RefreshOffsetEvent(applicationContext, response.getChangedOffset()));
+                break;
+            default:
+                break;
+        }
     }
 
     @Override
@@ -182,16 +175,54 @@ public class GeneralBufferActuator extends AbstractBufferActuator<WriterRequest,
         return generalExecutor;
     }
 
-    public TableGroup getTableGroup(String tableGroupId) {
-        return profileComponent.getTableGroup(tableGroupId);
+    private void distributeTableGroup(WriterResponse response, Mapping mapping, TableGroupPicker tableGroupPicker, List<Field> sourceFields, boolean enableFilter) {
+        // 1、映射字段
+        boolean enableSchemaResolver = profileComponent.getSystemConfig().isEnableSchemaResolver();
+        ConnectorConfig sourceConfig = getConnectorConfig(mapping.getSourceConnectorId());
+        ConnectorService sourceConnector = connectorFactory.getConnectorService(sourceConfig.getConnectorType());
+        List<Map> sourceDataList = new ArrayList<>();
+        List<Map> targetDataList = tableGroupPicker.getPicker()
+                .setSourceResolver(enableSchemaResolver ? sourceConnector.getSchemaResolver() : null)
+                .pickTargetData(sourceFields, enableFilter, response.getDataList(), sourceDataList);
+        if (CollectionUtils.isEmpty(targetDataList)) {
+            return;
+        }
+
+        // 2、参数转换
+        TableGroup tableGroup = tableGroupPicker.getTableGroup();
+        ConvertUtil.convert(tableGroup.getConvert(), targetDataList);
+
+        // 3、插件转换
+        final IncrementPluginContext context = new IncrementPluginContext();
+        context.setSourceConnectorInstance(connectorFactory.connect(sourceConfig));
+        context.setTargetConnectorInstance(connectorFactory.connect(getConnectorConfig(mapping.getTargetConnectorId())));
+        context.setSourceTableName(tableGroup.getSourceTable().getName());
+        context.setTargetTableName(tableGroup.getTargetTable().getName());
+        context.setEvent(response.getEvent());
+        context.setTargetFields(tableGroupPicker.getTargetFields());
+        context.setCommand(tableGroup.getCommand());
+        context.setBatchSize(generalBufferConfig.getBufferWriterCount());
+        context.setSourceList(sourceDataList);
+        context.setTargetList(targetDataList);
+        context.setPluginExtInfo(tableGroup.getPluginExtInfo());
+        context.setForceUpdate(mapping.isForceUpdate());
+        context.setEnableSchemaResolver(enableSchemaResolver);
+        pluginFactory.process(tableGroup.getPlugin(), context, ProcessEnum.CONVERT);
+
+        // 4、批量执行同步
+        Result result = parserComponent.writeBatch(context, getExecutor());
+
+        // 5、持久化同步结果
+        result.setTableGroupId(tableGroup.getId());
+        result.setTargetTableGroupName(context.getTargetTableName());
+        flushStrategy.flushIncrementData(mapping.getMetaId(), result, response.getEvent());
+
+        // 6、执行后置处理
+        pluginFactory.process(tableGroup.getPlugin(), context, ProcessEnum.AFTER);
     }
 
     /**
      * 解析DDL
-     *
-     * @param response
-     * @param mapping
-     * @param tableGroup
      */
     private void parseDDl(WriterResponse response, Mapping mapping, TableGroup tableGroup) {
         try {
@@ -200,49 +231,44 @@ public class GeneralBufferActuator extends AbstractBufferActuator<WriterRequest,
             String sConnType = sConnConfig.getConnectorType();
             String tConnType = tConnConfig.getConnectorType();
             // 0.生成目标表执行SQL(暂支持同源)
-            if (StringUtil.equals(sConnType, tConnType)) {
-                // 1.转换为目标SQL,执行到目标库
-                String targetTableName = tableGroup.getTargetTable().getName();
-                List<FieldMapping> originalFieldMappings = tableGroup.getFieldMapping();
-                DDLConfig targetDDLConfig = ddlParser.parseDDlConfig(response.getSql(), tConnType, targetTableName, originalFieldMappings);
-                final ConnectorInstance tConnectorInstance = connectorFactory.connect(tConnConfig);
-                Result result = connectorFactory.writerDDL(tConnectorInstance, targetDDLConfig);
-                result.setTableGroupId(tableGroup.getId());
-                result.setTargetTableGroupName(targetTableName);
-
-                // 2.获取目标表最新的属性字段
-                MetaInfo targetMetaInfo = parserComponent.getMetaInfo(mapping.getTargetConnectorId(), targetTableName);
-                MetaInfo originMetaInfo = parserComponent.getMetaInfo(mapping.getSourceConnectorId(), tableGroup.getSourceTable().getName());
-
-                // 3.更新表字段映射(根据保留的更改的属性,进行更改)
-                tableGroup.getSourceTable().setColumn(originMetaInfo.getColumn());
-                tableGroup.getTargetTable().setColumn(targetMetaInfo.getColumn());
-                tableGroup.setFieldMapping(ddlParser.refreshFiledMappings(originalFieldMappings, originMetaInfo, targetMetaInfo, targetDDLConfig));
-
-                // 4.更新执行命令
-                Map<String, String> commands = parserComponent.getCommand(mapping, tableGroup);
-                tableGroup.setCommand(commands);
-
-                // 5.持久化存储 & 更新缓存配置
-                profileComponent.editTableGroup(tableGroup);
-
-                // 6.发布更新事件,持久化增量数据
-                applicationContext.publishEvent(new RefreshOffsetEvent(applicationContext, response.getOffsetList()));
-                flushStrategy.flushIncrementData(mapping.getMetaId(), result, response.getEvent());
+            if (!StringUtil.equals(sConnType, tConnType)) {
+                logger.warn("暂只支持数据库同源并且是关系性解析DDL");
                 return;
             }
+            // 1.转换为目标SQL,执行到目标库
+            String targetTableName = tableGroup.getTargetTable().getName();
+            ConnectorService connectorService = connectorFactory.getConnectorService(tConnType);
+            DDLConfig targetDDLConfig = ddlParser.parse(connectorService, tableGroup, response.getSql());
+            ConnectorInstance tConnectorInstance = connectorFactory.connect(tConnConfig);
+            Result result = connectorFactory.writerDDL(tConnectorInstance, targetDDLConfig);
+            result.setTableGroupId(tableGroup.getId());
+            result.setTargetTableGroupName(targetTableName);
+
+            // 2.获取目标表最新的属性字段
+            MetaInfo sourceMetaInfo = parserComponent.getMetaInfo(mapping.getSourceConnectorId(), tableGroup.getSourceTable().getName());
+            MetaInfo targetMetaInfo = parserComponent.getMetaInfo(mapping.getTargetConnectorId(), targetTableName);
+
+            // 3.更新表字段映射(根据保留的更改的属性,进行更改)
+            tableGroup.getSourceTable().setColumn(sourceMetaInfo.getColumn());
+            tableGroup.getTargetTable().setColumn(targetMetaInfo.getColumn());
+            ddlParser.refreshFiledMappings(tableGroup, targetDDLConfig);
+
+            // 4.更新执行命令
+            tableGroup.setCommand(parserComponent.getCommand(mapping, tableGroup));
+
+            // 5.持久化存储 & 更新缓存配置
+            profileComponent.editTableGroup(tableGroup);
+
+            // 6.发布更新事件,持久化增量数据
+            applicationContext.publishEvent(new RefreshOffsetEvent(applicationContext, response.getChangedOffset()));
+            flushStrategy.flushIncrementData(mapping.getMetaId(), result, response.getEvent());
         } catch (Exception e) {
             logger.error(e.getMessage(), e);
-            return;
         }
-        logger.warn("暂只支持数据库同源并且是关系性解析DDL");
     }
 
     /**
      * 获取连接器配置
-     *
-     * @param connectorId
-     * @return
      */
     private ConnectorConfig getConnectorConfig(String connectorId) {
         Assert.hasText(connectorId, "Connector id can not be empty.");

+ 7 - 7
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/TableGroupBufferActuator.java

@@ -34,7 +34,7 @@ public class TableGroupBufferActuator extends GeneralBufferActuator implements C
 
     private String taskKey;
 
-    private String tableGroupId;
+    private String tableName;
 
     private volatile boolean running;
 
@@ -44,7 +44,7 @@ public class TableGroupBufferActuator extends GeneralBufferActuator implements C
     }
 
     @Override
-    protected boolean isRunning(BufferRequest request) {
+    public boolean isRunning(BufferRequest request) {
         return running;
     }
 
@@ -61,7 +61,7 @@ public class TableGroupBufferActuator extends GeneralBufferActuator implements C
         int coreSize = tableGroupBufferConfig.getThreadCoreSize();
         int maxSize = tableGroupBufferConfig.getMaxThreadSize();
         int queueCapacity = tableGroupBufferConfig.getThreadQueueCapacity();
-        String threadNamePrefix = "TableGroupExecutor-" + tableGroupId + StringUtil.SYMBOL;
+        String threadNamePrefix = "TableGroupExecutor-" + tableName + StringUtil.SYMBOL + tableName.hashCode() + StringUtil.SYMBOL;
         threadPoolTaskExecutor = ThreadPoolUtil.newThreadPoolTaskExecutor(coreSize, maxSize, queueCapacity, 30, threadNamePrefix);
         running = true;
         scheduledTaskService.start(taskKey, tableGroupBufferConfig.getBufferPeriodMillisecond(), this);
@@ -80,11 +80,11 @@ public class TableGroupBufferActuator extends GeneralBufferActuator implements C
         scheduledTaskService.stop(taskKey);
     }
 
-    public String getTableGroupId() {
-        return tableGroupId;
+    public String getTableName() {
+        return tableName;
     }
 
-    public void setTableGroupId(String tableGroupId) {
-        this.tableGroupId = tableGroupId;
+    public void setTableName(String tableName) {
+        this.tableName = tableName;
     }
 }

+ 3 - 3
dbsyncer-parser/src/main/java/org/dbsyncer/parser/impl/CacheServiceImpl.java

@@ -15,7 +15,7 @@ import java.util.concurrent.ConcurrentHashMap;
 @Component
 public class CacheServiceImpl implements CacheService {
 
-    private Map<String, Object> cache = new ConcurrentHashMap<>();
+    private final Map<String, Object> cache = new ConcurrentHashMap<>();
 
     @Override
     public Object put(String key, Object value) {
@@ -23,8 +23,8 @@ public class CacheServiceImpl implements CacheService {
     }
 
     @Override
-    public Object putIfAbsent(String key, Object value) {
-        return cache.putIfAbsent(key, value);
+    public Map<String, Object> getCache() {
+        return cache;
     }
 
     @Override

+ 35 - 61
dbsyncer-parser/src/main/java/org/dbsyncer/parser/impl/OperationTemplate.java

@@ -3,7 +3,6 @@
  */
 package org.dbsyncer.parser.impl;
 
-import org.dbsyncer.common.util.CollectionUtils;
 import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.parser.CacheService;
 import org.dbsyncer.parser.ParserException;
@@ -11,6 +10,7 @@ import org.dbsyncer.parser.command.impl.PersistenceCommand;
 import org.dbsyncer.parser.enums.CommandEnum;
 import org.dbsyncer.parser.enums.GroupStrategyEnum;
 import org.dbsyncer.parser.model.ConfigModel;
+import org.dbsyncer.parser.model.Group;
 import org.dbsyncer.parser.model.OperationConfig;
 import org.dbsyncer.parser.model.QueryConfig;
 import org.dbsyncer.parser.strategy.GroupStrategy;
@@ -24,10 +24,9 @@ import org.springframework.util.Assert;
 
 import javax.annotation.Resource;
 import java.util.ArrayList;
-import java.util.Collections;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
 
 /**
  * 操作配置模板
@@ -58,28 +57,30 @@ public final class OperationTemplate {
 
     public <T> List<T> queryAll(QueryConfig<T> query) {
         String groupId = getGroupId(query.getConfigModel(), query.getGroupStrategyEnum());
-        Group group = cacheService.get(groupId, Group.class);
-        if (null != group) {
-            List<String> index = group.getIndex();
-            if (!CollectionUtils.isEmpty(index)) {
-                List<T> list = new ArrayList<>();
-                index.forEach(e -> {
-                    Object v = cacheService.get(e);
-                    if (null != v) {
-                        list.add((T) v);
-                    }
-                });
-                return list;
-            }
-        }
-        return Collections.EMPTY_LIST;
+        List<T> list = new ArrayList<>();
+        cacheService.getCache().computeIfPresent(groupId, (k, v) -> {
+            Group group = (Group) v;
+            group.getIndex().forEach(id ->
+                cacheService.getCache().computeIfPresent(id, (x,y) -> {
+                    list.add((T) y);
+                    return y;
+                })
+            );
+            return group;
+        });
+        return list;
     }
 
     public int queryCount(QueryConfig query) {
         ConfigModel model = query.getConfigModel();
         String groupId = getGroupId(model, query.getGroupStrategyEnum());
-        Group group = cacheService.get(groupId, Group.class);
-        return null != group ? group.getIndex().size() : 0;
+        AtomicInteger count = new AtomicInteger();
+        cacheService.getCache().computeIfPresent(groupId, (k, v) -> {
+            Group group = (Group) v;
+            count.set(group.size());
+            return group;
+        });
+        return count.get();
     }
 
     public <T> T queryObject(Class<T> clazz, String id) {
@@ -114,10 +115,16 @@ public final class OperationTemplate {
 
         // 2、分组
         String groupId = getGroupId(model, strategy);
-        cacheService.putIfAbsent(groupId, new Group());
-        Group group = cacheService.get(groupId, Group.class);
-        group.addIfAbsent(id);
-        logger.debug("Put the model [{}] for {} group into cache.", id, groupId);
+        cacheService.getCache().compute(groupId, (k, v) -> {
+            Group group = (Group) v;
+            if (group == null) {
+                group = new Group();
+            }
+            if (!group.contains(id)) {
+                group.add(id);
+            }
+            return group;
+        });
     }
 
     public void remove(OperationConfig config) {
@@ -126,13 +133,11 @@ public final class OperationTemplate {
         // 删除分组
         ConfigModel model = cacheService.get(id, ConfigModel.class);
         String groupId = getGroupId(model, config.getGroupStrategyEnum());
-        Group group = cacheService.get(groupId, Group.class);
-        if (null != group) {
+        cacheService.getCache().computeIfPresent(groupId, (k, v) -> {
+            Group group = (Group) v;
             group.remove(id);
-            if (0 >= group.size()) {
-                cacheService.remove(groupId);
-            }
-        }
+            return group.isEmpty() ? null : group;
+        });
         cacheService.remove(id);
         storageService.remove(StorageEnum.CONFIG, id);
     }
@@ -148,35 +153,4 @@ public final class OperationTemplate {
         return groupId;
     }
 
-    public class Group {
-
-        private List<String> index;
-
-        public Group() {
-            this.index = new LinkedList<>();
-        }
-
-        public synchronized void addIfAbsent(String e) {
-            if (!index.contains(e)) {
-                index.add(e);
-            }
-        }
-
-        public synchronized void remove(String e) {
-            index.remove(e);
-        }
-
-        public int size() {
-            return index.size();
-        }
-
-        public List<String> getIndex() {
-            return Collections.unmodifiableList(index);
-        }
-
-        public void setIndex(List<String> index) {
-            this.index = index;
-        }
-    }
-
 }

+ 39 - 39
dbsyncer-parser/src/main/java/org/dbsyncer/parser/impl/ParserComponentImpl.java

@@ -10,7 +10,12 @@ import org.dbsyncer.connector.base.ConnectorFactory;
 import org.dbsyncer.parser.ParserComponent;
 import org.dbsyncer.parser.ProfileComponent;
 import org.dbsyncer.parser.event.FullRefreshEvent;
-import org.dbsyncer.parser.model.*;
+import org.dbsyncer.parser.model.Connector;
+import org.dbsyncer.parser.model.FieldMapping;
+import org.dbsyncer.parser.model.Mapping;
+import org.dbsyncer.parser.model.Picker;
+import org.dbsyncer.parser.model.TableGroup;
+import org.dbsyncer.parser.model.Task;
 import org.dbsyncer.parser.strategy.FlushStrategy;
 import org.dbsyncer.parser.util.ConvertUtil;
 import org.dbsyncer.parser.util.PickerUtil;
@@ -18,14 +23,14 @@ import org.dbsyncer.plugin.PluginFactory;
 import org.dbsyncer.plugin.enums.ProcessEnum;
 import org.dbsyncer.plugin.impl.FullPluginContext;
 import org.dbsyncer.sdk.config.CommandConfig;
-import org.dbsyncer.sdk.config.WriterBatchConfig;
 import org.dbsyncer.sdk.connector.ConnectorInstance;
 import org.dbsyncer.sdk.constant.ConnectorConstant;
 import org.dbsyncer.sdk.model.ConnectorConfig;
-import org.dbsyncer.sdk.model.Field;
 import org.dbsyncer.sdk.model.MetaInfo;
 import org.dbsyncer.sdk.model.Table;
 import org.dbsyncer.sdk.plugin.PluginContext;
+import org.dbsyncer.sdk.schema.SchemaResolver;
+import org.dbsyncer.sdk.spi.ConnectorService;
 import org.dbsyncer.sdk.util.PrimaryKeyUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -40,6 +45,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.Executor;
+import java.util.stream.Collectors;
 
 /**
  * @author AE86
@@ -133,7 +139,7 @@ public class ParserComponentImpl implements ParserComponent {
         String tTableName = group.getTargetTable().getName();
         Assert.notEmpty(fieldMapping, String.format("数据源表[%s]同步到目标源表[%s], 映射关系不能为空.", sTableName, tTableName));
         // 获取同步字段
-        Picker picker = new Picker(fieldMapping);
+        Picker picker = new Picker(group);
         List<String> primaryKeys = PrimaryKeyUtil.findTablePrimaryKeys(sourceTable);
         final FullPluginContext context = new FullPluginContext();
         context.setSourceConnectorInstance(connectorFactory.connect(sConfig));
@@ -149,6 +155,9 @@ public class ParserComponentImpl implements ParserComponent {
         context.setTargetFields(picker.getTargetFields());
         context.setSupportedCursor(StringUtil.isNotBlank(command.get(ConnectorConstant.OPERTION_QUERY_CURSOR)));
         context.setPageSize(mapping.getReadNum());
+        context.setEnableSchemaResolver(profileComponent.getSystemConfig().isEnableSchemaResolver());
+        ConnectorService sourceConnector = connectorFactory.getConnectorService(context.getSourceConnectorInstance().getConfig());
+        picker.setSourceResolver(context.isEnableSchemaResolver() ? sourceConnector.getSchemaResolver() : null);
         // 0、插件前置处理
         pluginFactory.process(group.getPlugin(), context, ProcessEnum.BEFORE);
 
@@ -162,7 +171,7 @@ public class ParserComponentImpl implements ParserComponent {
             context.setArgs(new ArrayList<>());
             context.setCursors(task.getCursors());
             context.setPageIndex(task.getPageIndex());
-            Result reader = connectorFactory.reader(context.getSourceConnectorInstance(), context);
+            Result reader = connectorFactory.reader(context);
             List<Map> source = reader.getSuccessData();
             if (CollectionUtils.isEmpty(source)) {
                 logger.info("完成全量同步任务:{}, [{}] >> [{}]", metaId, sTableName, tTableName);
@@ -194,7 +203,7 @@ public class ParserComponentImpl implements ParserComponent {
             pluginFactory.process(group.getPlugin(), context, ProcessEnum.AFTER);
 
             // 8、判断尾页
-            if (source.size() < context.getPageIndex()) {
+            if (source.size() < context.getPageSize()) {
                 logger.info("完成全量:{}, [{}] >> [{}]", metaId, sTableName, tTableName);
                 break;
             }
@@ -203,55 +212,46 @@ public class ParserComponentImpl implements ParserComponent {
 
     @Override
     public Result writeBatch(PluginContext context, Executor executor) {
-        final Result result = new Result();
+        Result result = new Result();
         // 终止同步数据到目标源库
         if (context.isTerminated()) {
             result.getSuccessData().addAll(context.getTargetList());
             return result;
         }
 
-        List<Map> dataList = context.getTargetList();
         int batchSize = context.getBatchSize();
-        String tableName = context.getTargetTableName();
-        String event = context.getEvent();
-        Map<String, String> command = context.getCommand();
-        List<Field> fields = context.getTargetFields();
         // 总数
-        int total = dataList.size();
+        int total = context.getTargetList().size();
         // 单次任务
         if (total <= batchSize) {
-            return connectorFactory.writer(context.getTargetConnectorInstance(), new WriterBatchConfig(tableName, event, command, fields, dataList, context.isForceUpdate()));
+            return connectorFactory.writer(context);
         }
 
         // 批量任务, 拆分
         int taskSize = total % batchSize == 0 ? total / batchSize : total / batchSize + 1;
-
-        final CountDownLatch latch = new CountDownLatch(taskSize);
-        int fromIndex = 0;
-        int toIndex = batchSize;
+        CountDownLatch latch = new CountDownLatch(taskSize);
+        int offset = 0;
         for (int i = 0; i < taskSize; i++) {
-            final List<Map> data;
-            if (toIndex > total) {
-                toIndex = fromIndex + (total % batchSize);
-                data = dataList.subList(fromIndex, toIndex);
-            } else {
-                data = dataList.subList(fromIndex, toIndex);
-                fromIndex += batchSize;
-                toIndex += batchSize;
+            try {
+                PluginContext tmpContext = (PluginContext) context.clone();
+                tmpContext.setTargetList(context.getTargetList().stream().skip(offset).limit(batchSize).collect(Collectors.toList()));
+                offset += batchSize;
+                executor.execute(() -> {
+                    try {
+                        Result w = connectorFactory.writer(tmpContext);
+                        result.addSuccessData(w.getSuccessData());
+                        result.addFailData(w.getFailData());
+                        result.getError().append(w.getError());
+                    } catch (Exception e) {
+                        logger.error(e.getMessage());
+                    } finally {
+                        latch.countDown();
+                    }
+                });
+            } catch (CloneNotSupportedException e) {
+                logger.error(e.getMessage(), e);
+                latch.countDown();
             }
-
-            executor.execute(() -> {
-                try {
-                    Result w = connectorFactory.writer(context.getTargetConnectorInstance(), new WriterBatchConfig(tableName, event, command, fields, data, context.isForceUpdate()));
-                    result.addSuccessData(w.getSuccessData());
-                    result.addFailData(w.getFailData());
-                    result.getError().append(w.getError());
-                } catch (Exception e) {
-                    logger.error(e.getMessage());
-                } finally {
-                    latch.countDown();
-                }
-            });
         }
         try {
             latch.await();

+ 2 - 3
dbsyncer-parser/src/main/java/org/dbsyncer/parser/impl/ProfileComponentImpl.java

@@ -149,16 +149,15 @@ public class ProfileComponentImpl implements ProfileComponent {
     @Override
     public List<TableGroup> getTableGroupAll(String mappingId) {
         TableGroup tableGroup = new TableGroup().setMappingId(mappingId);
-        return operationTemplate.queryAll(new QueryConfig(tableGroup, GroupStrategyEnum.TABLE));
+        return operationTemplate.queryAll(new QueryConfig<>(tableGroup, GroupStrategyEnum.TABLE));
     }
 
     @Override
     public List<TableGroup> getSortedTableGroupAll(String mappingId) {
-        List<TableGroup> list = getTableGroupAll(mappingId)
+        return getTableGroupAll(mappingId)
                 .stream()
                 .sorted(Comparator.comparing(TableGroup::getIndex).reversed())
                 .collect(Collectors.toList());
-        return list;
     }
 
     @Override

+ 85 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/impl/TableGroupContextImpl.java

@@ -0,0 +1,85 @@
+/**
+ * DBSyncer Copyright 2020-2025 All Rights Reserved.
+ */
+package org.dbsyncer.parser.impl;
+
+import org.dbsyncer.parser.TableGroupContext;
+import org.dbsyncer.parser.model.Mapping;
+import org.dbsyncer.parser.model.TableGroup;
+import org.dbsyncer.parser.model.TableGroupPicker;
+import org.dbsyncer.parser.util.PickerUtil;
+import org.springframework.stereotype.Component;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * @Author 穿云
+ * @Version 1.0.0
+ * @Date 2025-01-16 23:34
+ */
+@Component
+public final class TableGroupContextImpl implements TableGroupContext {
+
+    /**
+     * 驱动表映射关系
+     */
+    private final Map<String, InnerMapping> tableGroupMap = new ConcurrentHashMap<>();
+
+    @Override
+    public void put(Mapping mapping, List<TableGroup> tableGroups) {
+        tableGroupMap.computeIfAbsent(mapping.getMetaId(), k -> {
+            InnerMapping innerMap = new InnerMapping();
+            tableGroups.forEach(tableGroup -> {
+                String sourceTableName = tableGroup.getSourceTable().getName();
+                innerMap.add(sourceTableName, PickerUtil.mergeTableGroupConfig(mapping, tableGroup));
+            });
+            return innerMap;
+        });
+    }
+
+    @Override
+    public void update(Mapping mapping, List<TableGroup> tableGroups) {
+        tableGroupMap.computeIfPresent(mapping.getMetaId(), (k, innerMap) -> {
+            // 先清空表映射关系,再更新表映射关系
+            tableGroups.stream().findFirst().ifPresent(tableGroup -> innerMap.remove(tableGroup.getSourceTable().getName()));
+            tableGroups.forEach(tableGroup -> {
+                String sourceTableName = tableGroup.getSourceTable().getName();
+                innerMap.add(sourceTableName, PickerUtil.mergeTableGroupConfig(mapping, tableGroup));
+            });
+            return innerMap;
+        });
+    }
+
+    @Override
+    public List<TableGroupPicker> getTableGroupPickers(String metaId, String tableName) {
+        List<TableGroupPicker> list = new ArrayList<>();
+        tableGroupMap.computeIfPresent(metaId, (k, innerMapping) -> {
+            innerMapping.pickerMap.computeIfPresent(tableName, (x, pickers) -> {
+                list.addAll(pickers);
+                return pickers;
+            });
+            return innerMapping;
+        });
+        return list;
+    }
+
+    @Override
+    public void clear(String metaId) {
+        tableGroupMap.remove(metaId);
+    }
+
+    static final class InnerMapping {
+        Map<String, List<TableGroupPicker>> pickerMap = new ConcurrentHashMap<>();
+
+        public void add(String tableName, TableGroup tableGroup) {
+            pickerMap.computeIfAbsent(tableName, k -> new ArrayList<>()).add(new TableGroupPicker(tableGroup));
+        }
+
+        public void remove(String tableName) {
+            pickerMap.remove(tableName);
+        }
+    }
+}

+ 16 - 5
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/AbstractWriter.java

@@ -1,6 +1,7 @@
 package org.dbsyncer.parser.model;
 
 import org.dbsyncer.sdk.enums.ChangedEventTypeEnum;
+import org.dbsyncer.sdk.model.ChangedOffset;
 
 /**
  * @author AE86
@@ -11,7 +12,9 @@ public abstract class AbstractWriter {
 
     private ChangedEventTypeEnum typeEnum;
 
-    private String tableGroupId;
+    private ChangedOffset changedOffset;
+
+    private String tableName;
 
     private String event;
 
@@ -25,12 +28,20 @@ public abstract class AbstractWriter {
         this.typeEnum = typeEnum;
     }
 
-    public String getTableGroupId() {
-        return tableGroupId;
+    public ChangedOffset getChangedOffset() {
+        return changedOffset;
+    }
+
+    public void setChangedOffset(ChangedOffset changedOffset) {
+        this.changedOffset = changedOffset;
+    }
+
+    public String getTableName() {
+        return tableName;
     }
 
-    public void setTableGroupId(String tableGroupId) {
-        this.tableGroupId = tableGroupId;
+    public void setTableName(String tableName) {
+        this.tableName = tableName;
     }
 
     public String getEvent() {

+ 0 - 166
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/FieldPicker.java

@@ -1,166 +0,0 @@
-/**
- * DBSyncer Copyright 2020-2023 All Rights Reserved.
- */
-package org.dbsyncer.parser.model;
-
-import org.dbsyncer.common.util.CollectionUtils;
-import org.dbsyncer.common.util.DateFormatUtil;
-import org.dbsyncer.common.util.StringUtil;
-import org.dbsyncer.sdk.filter.CompareFilter;
-import org.dbsyncer.sdk.enums.FilterEnum;
-import org.dbsyncer.sdk.enums.OperationEnum;
-import org.dbsyncer.sdk.model.Field;
-import org.dbsyncer.sdk.model.Filter;
-import org.springframework.util.Assert;
-
-import java.sql.Date;
-import java.sql.Timestamp;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-public class FieldPicker {
-
-    private final TableGroup tableGroup;
-    private List<Node> index;
-    private int indexSize;
-    private boolean enabledFilter;
-    private List<Filter> add;
-    private List<Filter> or;
-
-    public FieldPicker(TableGroup tableGroup) {
-        this.tableGroup = tableGroup;
-    }
-
-    public FieldPicker(TableGroup tableGroup, List<Filter> filter, List<Field> column, List<FieldMapping> fieldMapping) {
-        this.tableGroup = tableGroup;
-        init(filter, column, fieldMapping);
-    }
-
-    public Map<String, Object> getColumns(List<Object> list) {
-        if (!CollectionUtils.isEmpty(list)) {
-            Map<String, Object> data = new HashMap<>(indexSize);
-            final int size = list.size() - 1;
-            index.forEach(node -> {
-                if (node.i <= size) {
-                    data.put(node.name, list.get(node.i));
-                }
-            });
-            return data;
-        }
-        return Collections.EMPTY_MAP;
-    }
-
-    /**
-     * 根据过滤条件过滤
-     *
-     * @param row
-     * @return
-     */
-    public boolean filter(Map<String, Object> row) {
-        if (!enabledFilter) {
-            return true;
-        }
-        // where (id > 1 and id < 100) or (id = 100 or id =101)
-        // 或 关系(成立任意条件)
-        Object value = null;
-        for (Filter f : or) {
-            value = row.get(f.getName());
-            if (null == value) {
-                continue;
-            }
-            if (compareValueWithFilter(f, value)) {
-                return true;
-            }
-        }
-
-        boolean pass = false;
-        // 并 关系(成立所有条件)
-        for (Filter f : add) {
-            value = row.get(f.getName());
-            if (null == value) {
-                continue;
-            }
-            if (!compareValueWithFilter(f, value)) {
-                return false;
-            }
-            pass = true;
-        }
-
-        return pass;
-    }
-
-    /**
-     * 比较值是否满足过滤条件
-     *
-     * @param filter        过滤器
-     * @param comparedValue 比较值
-     * @return
-     */
-    private boolean compareValueWithFilter(Filter filter, Object comparedValue) {
-        CompareFilter compareFilter = FilterEnum.getCompareFilter(filter.getFilter());
-        if (null == filter) {
-            return false;
-        }
-
-        // 支持时间比较
-        if (comparedValue instanceof Timestamp) {
-            Timestamp comparedTimestamp = (Timestamp) comparedValue;
-            Timestamp filterTimestamp = DateFormatUtil.stringToTimestamp(filter.getValue());
-            return compareFilter.compare(String.valueOf(comparedTimestamp.getTime()), String.valueOf(filterTimestamp.getTime()));
-        }
-        if (comparedValue instanceof Date) {
-            Date comparedDate = (Date) comparedValue;
-            Date filterDate = DateFormatUtil.stringToDate(filter.getValue());
-            return compareFilter.compare(String.valueOf(comparedDate.getTime()), String.valueOf(filterDate.getTime()));
-        }
-
-        return compareFilter.compare(String.valueOf(comparedValue), filter.getValue());
-    }
-
-    private void init(List<Filter> filter, List<Field> column, List<FieldMapping> fieldMapping) {
-        // column  => [1, 86, 0, 中文, 2020-05-15T12:17:22.000+0800, 备注信息]
-        Assert.notEmpty(column, "读取字段不能为空.");
-        Assert.notEmpty(fieldMapping, "映射关系不能为空.");
-
-        // 解析过滤条件
-        if ((enabledFilter = !CollectionUtils.isEmpty(filter))) {
-            add = filter.stream().filter(f -> StringUtil.equals(f.getOperation(), OperationEnum.AND.getName())).collect(
-                    Collectors.toList());
-            or = filter.stream().filter(f -> StringUtil.equals(f.getOperation(), OperationEnum.OR.getName())).collect(Collectors.toList());
-        }
-
-        // 记录字段索引 [{"ID":0},{"NAME":1}]
-        index = new LinkedList<>();
-        int size = column.size();
-        String k = null;
-        Field field = null;
-        for (int i = 0; i < size; i++) {
-            field = column.get(i);
-            k = field.isUnmodifiabled() ? field.getLabelName() : field.getName();
-            index.add(new Node(k, i));
-        }
-        Assert.notEmpty(index, "同步映射关系不能为空.");
-        this.indexSize = index.size();
-    }
-
-    public TableGroup getTableGroup() {
-        return tableGroup;
-    }
-
-    final class Node {
-        // 属性
-        String name;
-        // 索引
-        int i;
-
-        public Node(String name, int i) {
-            this.name = name;
-            this.i = i;
-        }
-    }
-
-}

+ 40 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/Group.java

@@ -0,0 +1,40 @@
+/**
+ * DBSyncer Copyright 2020-2025 All Rights Reserved.
+ */
+package org.dbsyncer.parser.model;
+
+import java.util.ArrayList;
+import java.util.List;
+
/**
 * Holds the ids of configuration models belonging to one cache group,
 * in insertion order.
 *
 * <p>Thread-safety: ALL accessors are synchronized. The original version
 * synchronized only {@link #remove(String)}, which made the class neither
 * thread-safe nor clearly unsynchronized; callers such as OperationTemplate
 * additionally serialize access through ConcurrentHashMap#compute, but this
 * class no longer relies on that.
 *
 * @Author 穿云
 * @Version 1.0.0
 * @Date 2025-01-26 21:36
 */
public final class Group {

    // Insertion-ordered ids of the models in this group.
    private final List<String> index = new ArrayList<>();

    /**
     * Removes the first occurrence of the given id, if present.
     *
     * @param e id to remove
     */
    public synchronized void remove(String e) {
        index.remove(e);
    }

    /**
     * @return the number of ids currently in the group
     */
    public synchronized int size() {
        return index.size();
    }

    /**
     * Returns the backing list.
     *
     * <p>NOTE(review): this exposes internal mutable state; callers must
     * treat the result as read-only — TODO consider returning an
     * unmodifiable view once all call sites are confirmed read-only.
     *
     * @return the internal id list
     */
    public synchronized List<String> getIndex() {
        return index;
    }

    /**
     * @param id id to look up
     * @return true if the group already contains the id
     */
    public synchronized boolean contains(String id) {
        return index.contains(id);
    }

    /**
     * Appends the id unconditionally; duplicate checking is the caller's
     * responsibility (see {@link #contains(String)}).
     *
     * @param id id to append
     */
    public synchronized void add(String id) {
        index.add(id);
    }

    /**
     * @return true when the group holds no ids
     */
    public synchronized boolean isEmpty() {
        return index.isEmpty();
    }
}

+ 2 - 2
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/Meta.java

@@ -3,7 +3,7 @@ package org.dbsyncer.parser.model;
 import org.dbsyncer.parser.enums.MetaEnum;
 import org.dbsyncer.sdk.constant.ConfigConstant;
 
-import java.util.LinkedHashMap;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -50,7 +50,7 @@ public class Meta extends ConfigModel {
         this.total = new AtomicLong(0);
         this.success = new AtomicLong(0);
         this.fail = new AtomicLong(0);
-        this.snapshot = new LinkedHashMap<>();
+        this.snapshot = new HashMap<>();
         this.beginTime = 0L;
         this.endTime = 0L;
     }

+ 144 - 27
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/Picker.java

@@ -1,54 +1,167 @@
 package org.dbsyncer.parser.model;
 
 import org.dbsyncer.common.util.CollectionUtils;
+import org.dbsyncer.common.util.DateFormatUtil;
 import org.dbsyncer.common.util.StringUtil;
+import org.dbsyncer.sdk.enums.FilterEnum;
+import org.dbsyncer.sdk.enums.OperationEnum;
+import org.dbsyncer.sdk.filter.CompareFilter;
 import org.dbsyncer.sdk.model.Field;
+import org.dbsyncer.sdk.model.Filter;
+import org.dbsyncer.sdk.schema.SchemaResolver;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.*;
 import java.util.stream.Collectors;
 
 public class Picker {
 
-    private List<Field> sourceFields = new ArrayList<>();
-    private List<Field> targetFields = new ArrayList<>();
+    private final List<Field> sourceFields = new ArrayList<>();
+    private final List<Field> targetFields = new ArrayList<>();
+    private final int sFieldSize;
+    private final int tFieldSize;
+    private final boolean enabledFilter;
+    private List<Filter> add;
+    private List<Filter> or;
+    private SchemaResolver sourceResolver;
 
-    public Picker(List<FieldMapping> fieldMapping) {
-        if (!CollectionUtils.isEmpty(fieldMapping)) {
-            fieldMapping.forEach(m -> {
+    public Picker(TableGroup tableGroup) {
+        if (!CollectionUtils.isEmpty(tableGroup.getFieldMapping())) {
+            tableGroup.getFieldMapping().forEach(m -> {
                 sourceFields.add(m.getSource());
                 targetFields.add(m.getTarget());
             });
         }
+        this.sFieldSize = sourceFields.size();
+        this.tFieldSize = targetFields.size();
+        // 解析过滤条件
+        List<Filter> filter = tableGroup.getFilter();
+        enabledFilter = !CollectionUtils.isEmpty(filter);
+        if (enabledFilter) {
+            add = filter.stream().filter(f -> StringUtil.equals(f.getOperation(), OperationEnum.AND.getName())).collect(Collectors.toList());
+            or = filter.stream().filter(f -> StringUtil.equals(f.getOperation(), OperationEnum.OR.getName())).collect(Collectors.toList());
+        }
     }
 
     public List<Map> pickTargetData(List<Map> source) {
         List<Map> targetMapList = new ArrayList<>();
         if (!CollectionUtils.isEmpty(source)) {
-            final int size = source.size();
-            final int sFieldSize = sourceFields.size();
-            final int tFieldSize = targetFields.size();
             Map<String, Object> target = null;
-            for (int i = 0; i < size; i++) {
+            for (Map row : source) {
                 target = new HashMap<>();
-                exchange(sFieldSize, tFieldSize, sourceFields, targetFields, source.get(i), target);
+                exchange(sFieldSize, tFieldSize, this.sourceFields, this.targetFields, row, target);
                 targetMapList.add(target);
             }
         }
         return targetMapList;
     }
 
-    public Map pickSourceData(Map target) {
    /**
     * Converts raw positional rows into target-keyed maps via the field mapping.
     *
     * <p>For each row: builds a source map keyed by the original source field
     * names, maps it onto target field names via {@code exchange}, optionally
     * applies the configured row filters, and records BOTH the source map (into
     * the caller-supplied {@code sourceMapList}) and the target map (into the
     * returned list). The two lists stay index-aligned for rows that pass.
     *
     * @param sourceOriginalFields ordered field metadata matching each row's positions
     * @param enableFilter         when true, rows failing {@code filter(target)} are dropped
     * @param rows                 raw positional values, one inner list per row
     * @param sourceMapList        out-parameter collecting the source-keyed map of every accepted row
     * @return target-keyed maps for the accepted rows (empty list when {@code rows} is empty)
     */
    public List<Map> pickTargetData(List<Field> sourceOriginalFields, boolean enableFilter, List<List<Object>> rows, List<Map> sourceMapList) {
        List<Map> targetMapList = new ArrayList<>();
        if (CollectionUtils.isEmpty(rows)) {
            return targetMapList;
        }
        Map<String, Object> source = null;
        Map<String, Object> target = null;
        for (List<Object> row : rows) {
            // Silently skip rows whose value count does not match the field
            // metadata (positional zip would otherwise misalign columns).
            if (row.size() != sourceOriginalFields.size()) {
                continue;
            }
            source = new HashMap<>();
            for (int j = 0; j < sourceOriginalFields.size(); j++) {
                source.put(sourceOriginalFields.get(j).getName(), row.get(j));
            }
            target = new HashMap<>();
            exchange(sFieldSize, tFieldSize, this.sourceFields, this.targetFields, source, target);
            // Drop the row if it fails the configured filter conditions
            // (filters are evaluated against TARGET field names).
            if (enableFilter && !filter(target)) {
                continue;
            }
            sourceMapList.add(source);
            targetMapList.add(target);
        }
        return targetMapList;
    }
+
    /**
     * Reverse-maps a target-keyed row back to source values.
     *
     * <p>Runs {@code exchange} in the target→source direction, then returns the
     * source values ordered by the de-duplicated source field list (see
     * {@code getFields}), so the result aligns positionally with
     * {@code getSourceFields()}. Fields absent from {@code target} yield
     * {@code null} entries.
     *
     * @param target target-keyed row (may be null/empty, producing all-null values)
     * @return source values in de-duplicated source-field order
     */
    public List<Object> pickSourceData(Map target) {
        Map<String, Object> source = new HashMap<>();
        if (!CollectionUtils.isEmpty(target)) {
            exchange(tFieldSize, sFieldSize, targetFields, sourceFields, target, source);
        }

        return getFields(sourceFields).stream().map(field -> source.get(field.getName())).collect(Collectors.toList());
    }
+
    /** @return the de-duplicated (by name), null-free source fields, unmodifiable. */
    public List<Field> getSourceFields() {
        return getFields(sourceFields);
    }

    /** @return the de-duplicated (by name), null-free target fields, unmodifiable. */
    public List<Field> getTargetFields() {
        return getFields(targetFields);
    }

    /**
     * @return target fields indexed by name; null entries are skipped and the
     *         FIRST field wins on duplicate names (merge function keeps k1)
     */
    public Map<String, Field> getTargetFieldMap() {
        return targetFields.stream().filter(Objects::nonNull).collect(Collectors.toMap(Field::getName, f -> f, (k1, k2) -> k1));
    }
+
    /**
     * Evaluates the configured row filters against a target-keyed row.
     *
     * <p>Semantics: the row passes if it matches ANY condition in the OR group;
     * otherwise it passes only if it matched at least one AND condition and
     * failed none. When filtering is disabled every row passes.
     *
     * @param row target-keyed row values
     * @return true if the row should be kept
     */
    private boolean filter(Map<String, Object> row) {
        if (!enabledFilter) {
            return true;
        }
        // where (id > 1 and id < 100) or (id = 100 or id =101)
        // 或 关系(成立任意条件) — OR group: first match accepts immediately.
        Object value = null;
        for (Filter f : or) {
            value = row.get(f.getName());
            if (null == value) {
                // NOTE(review): null/absent values are skipped, not treated as
                // a failed match — confirm this is the intended null semantics.
                continue;
            }
            if (compareValueWithFilter(f, value)) {
                return true;
            }
        }

        boolean pass = false;
        // 并 关系(成立所有条件) — AND group: any evaluated condition that fails
        // rejects the row; null values are skipped (not counted either way).
        for (Filter f : add) {
            value = row.get(f.getName());
            if (null == value) {
                continue;
            }
            if (!compareValueWithFilter(f, value)) {
                return false;
            }
            pass = true;
        }

        // pass is true only if at least one AND condition was actually
        // evaluated and matched; a row with no OR hit and no evaluated AND
        // condition is rejected.
        return pass;
    }
+
    /**
     * Tests a single row value against one filter condition.
     *
     * <p>Temporal values are compared by epoch millis so that string formats do
     * not affect ordering: {@link Timestamp} is checked FIRST (it is a
     * {@code java.util.Date} subtype, and checking {@code java.sql.Date} first
     * would never apply here since Timestamp does not extend java.sql.Date —
     * the two branches are disjoint). Everything else falls back to a string
     * comparison via the filter's compare strategy.
     *
     * @param filter        过滤器 — the condition (field, operator, literal value)
     * @param comparedValue 比较值 — the row's actual value (non-null; caller skips nulls)
     * @return true if the value satisfies the condition
     */
    private boolean compareValueWithFilter(Filter filter, Object comparedValue) {
        CompareFilter compareFilter = FilterEnum.getCompareFilter(filter.getFilter());

        // 支持时间比较 — compare timestamps by epoch millis, parsing the
        // filter literal with the project's date utility.
        if (comparedValue instanceof Timestamp) {
            Timestamp comparedTimestamp = (Timestamp) comparedValue;
            Timestamp filterTimestamp = DateFormatUtil.stringToTimestamp(filter.getValue());
            return compareFilter.compare(String.valueOf(comparedTimestamp.getTime()), String.valueOf(filterTimestamp.getTime()));
        }
        if (comparedValue instanceof java.sql.Date) {
            java.sql.Date comparedDate = (java.sql.Date) comparedValue;
            Date filterDate = DateFormatUtil.stringToDate(filter.getValue());
            return compareFilter.compare(String.valueOf(comparedDate.getTime()), String.valueOf(filterDate.getTime()));
        }

        // NOTE(review): a plain java.util.Date value falls through to this
        // string comparison (toString vs. filter literal) — confirm callers
        // never supply java.util.Date, or epoch comparison may be intended.
        return compareFilter.compare(String.valueOf(comparedValue), filter.getValue());
    }
 
     private void exchange(int sFieldSize, int tFieldSize, List<Field> sFields, List<Field> tFields, Map<String, Object> source, Map<String, Object> target) {
@@ -62,7 +175,11 @@ public class Picker {
                 tField = tFields.get(k);
             }
             if (null != sField && null != tField) {
-                v = source.get(sField.isUnmodifiabled() ? sField.getLabelName() : sField.getName());
+                v = source.get(sField.getName());
+                // 合并为标准数据类型
+                if (sourceResolver != null) {
+                    v = sourceResolver.merge(v, sField);
+                }
                 tFieldName = tField.getName();
                 // 映射值
                 if (!target.containsKey(tFieldName)) {
@@ -70,16 +187,15 @@ public class Picker {
                     continue;
                 }
                 // 合并值
-                String mergedValue = new StringBuilder(StringUtil.toString(target.get(tFieldName))).append(StringUtil.toString(v)).toString();
-                target.put(tFieldName, mergedValue);
+                target.put(tFieldName, StringUtil.toString(target.get(tFieldName)) + StringUtil.toString(v));
             }
         }
     }
 
-    public List<Field> getTargetFields() {
+    private List<Field> getFields(List<Field> list) {
         List<Field> fields = new ArrayList<>();
         Set<String> keys = new HashSet<>();
-        targetFields.stream().forEach(f -> {
+        list.forEach(f -> {
             if (f != null && !keys.contains(f.getName())) {
                 fields.add(f);
                 keys.add(f.getName());
@@ -89,7 +205,8 @@ public class Picker {
         return Collections.unmodifiableList(fields);
     }
 
-    public Map<String, Field> getTargetFieldMap() {
-        return targetFields.stream().filter(f -> null != f).collect(Collectors.toMap(Field::getName, f -> f, (k1, k2) -> k1));
+    public Picker setSourceResolver(SchemaResolver sourceResolver) {
+        this.sourceResolver = sourceResolver;
+        return this;
     }
 }

+ 1 - 1
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/QueryConfig.java

@@ -7,7 +7,7 @@ import org.dbsyncer.parser.enums.GroupStrategyEnum;
 
 public class QueryConfig<T> {
 
-    private ConfigModel configModel;
+    private final ConfigModel configModel;
 
     private GroupStrategyEnum groupStrategyEnum = GroupStrategyEnum.DEFAULT;
 

+ 13 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/SystemConfig.java

@@ -69,6 +69,11 @@ public class SystemConfig extends ConfigModel {
      */
     private String watermark;
 
+    /**
+     * 是否启用字段解析器
+     */
+    private boolean enableSchemaResolver;
+
     public int getExpireDataDays() {
         return expireDataDays;
     }
@@ -148,4 +153,12 @@ public class SystemConfig extends ConfigModel {
     public void setWatermark(String watermark) {
         this.watermark = watermark;
     }
+
+    public boolean isEnableSchemaResolver() {
+        return enableSchemaResolver;
+    }
+
+    public void setEnableSchemaResolver(boolean enableSchemaResolver) {
+        this.enableSchemaResolver = enableSchemaResolver;
+    }
 }

+ 47 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/TableGroupPicker.java

@@ -0,0 +1,47 @@
+/**
+ * DBSyncer Copyright 2020-2025 All Rights Reserved.
+ */
+package org.dbsyncer.parser.model;
+
+import org.dbsyncer.sdk.model.Field;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @Author 穿云
+ * @Version 1.0.0
+ * @Date 2025-01-18 22:53
+ */
/**
 * Immutable pairing of a {@link TableGroup} with the {@link Picker} built from
 * its field mappings, caching the picker's resolved field lists.
 */
public class TableGroupPicker {

    // The table-group mapping this picker was built from.
    private final TableGroup tableGroup;

    // Field picker derived from the table group's field mappings and filters.
    private final Picker picker;

    // Cached once at construction: Picker#getSourceFields/getTargetFields
    // rebuild their de-duplicated result lists on every call.
    private final List<Field> sourceFields;
    private final List<Field> targetFields;

    public TableGroupPicker(TableGroup tableGroup) {
        this.tableGroup = tableGroup;
        this.picker = new Picker(tableGroup);
        this.sourceFields = picker.getSourceFields();
        this.targetFields = picker.getTargetFields();
    }

    /** @return the underlying table-group configuration */
    public TableGroup getTableGroup() {
        return tableGroup;
    }

    /** @return the picker built from this table group's field mappings */
    public Picker getPicker() {
        return picker;
    }

    /** @return cached de-duplicated source fields (see Picker#getSourceFields) */
    public List<Field> getSourceFields() {
        return sourceFields;
    }

    /** @return cached de-duplicated target fields (see Picker#getTargetFields) */
    public List<Field> getTargetFields() {
        return targetFields;
    }
}

+ 0 - 3
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/Task.java

@@ -16,9 +16,6 @@ public class Task {
 
     private long endTime;
 
-    public Task() {
-    }
-
     public Task(String id) {
         this.id = id;
         this.state = StateEnum.RUNNING;

+ 7 - 14
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/WriterRequest.java

@@ -2,9 +2,8 @@ package org.dbsyncer.parser.model;
 
 import org.dbsyncer.parser.flush.BufferRequest;
 import org.dbsyncer.sdk.listener.ChangedEvent;
-import org.dbsyncer.sdk.model.ChangedOffset;
 
-import java.util.Map;
+import java.util.List;
 
 /**
  * @author AE86
@@ -13,30 +12,24 @@ import java.util.Map;
  */
 public class WriterRequest extends AbstractWriter implements BufferRequest {
 
-    private Map row;
+    private final List<Object> row;
 
-    private ChangedOffset changedOffset;
-
-    public WriterRequest(String tableGroupId, ChangedEvent event) {
+    public WriterRequest(ChangedEvent event) {
         setTypeEnum(event.getType());
-        setTableGroupId(tableGroupId);
+        setChangedOffset(event.getChangedOffset());
+        setTableName(event.getSourceTableName());
         setEvent(event.getEvent());
         setSql(event.getSql());
         this.row = event.getChangedRow();
-        this.changedOffset = event.getChangedOffset();
     }
 
     @Override
     public String getMetaId() {
-        return changedOffset.getMetaId();
+        return getChangedOffset().getMetaId();
     }
 
-    public Map getRow() {
+    public List<Object> getRow() {
         return row;
     }
 
-    public ChangedOffset getChangedOffset() {
-        return changedOffset;
-    }
-
 }

+ 3 - 15
dbsyncer-parser/src/main/java/org/dbsyncer/parser/model/WriterResponse.java

@@ -2,11 +2,9 @@ package org.dbsyncer.parser.model;
 
 import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.parser.flush.BufferResponse;
-import org.dbsyncer.sdk.model.ChangedOffset;
 
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Map;
 
 /**
  * @author AE86
@@ -15,9 +13,7 @@ import java.util.Map;
  */
 public class WriterResponse extends AbstractWriter implements BufferResponse {
 
-    private List<Map> dataList = new LinkedList<>();
-
-    private List<ChangedOffset> offsetList = new LinkedList<>();
+    private final List<List<Object>> dataList = new LinkedList<>();
 
     private transient boolean isMerged;
 
@@ -31,22 +27,14 @@ public class WriterResponse extends AbstractWriter implements BufferResponse {
         return StringUtil.SYMBOL.concat(getEvent());
     }
 
-    public void addData(Map data) {
+    public void addData(List<Object> data) {
         dataList.add(data);
     }
 
-    public void addChangedOffset(ChangedOffset changedOffset) {
-        offsetList.add(changedOffset);
-    }
-
-    public List<Map> getDataList() {
+    public List<List<Object>> getDataList() {
         return dataList;
     }
 
-    public List<ChangedOffset> getOffsetList() {
-        return offsetList;
-    }
-
     public boolean isMerged() {
         return isMerged;
     }

+ 5 - 5
dbsyncer-plugin/src/main/java/org/dbsyncer/plugin/PluginFactory.java

@@ -24,11 +24,11 @@ import java.net.URL;
 import java.net.URLClassLoader;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.ServiceLoader;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
 
 /**
@@ -55,7 +55,7 @@ public class PluginFactory implements DisposableBean {
 
     private final List<Plugin> plugins = new LinkedList<>();
 
-    private final Map<String, PluginService> service = new LinkedHashMap<>();
+    private final Map<String, PluginService> service = new ConcurrentHashMap<>();
 
     @Resource
     private ApplicationContext applicationContext;
@@ -80,7 +80,7 @@ public class PluginFactory implements DisposableBean {
 
     public synchronized void loadPlugins() {
         if (!CollectionUtils.isEmpty(plugins)) {
-            List<Plugin> unmodifiablePlugin = plugins.stream().filter(p -> p.isUnmodifiable()).collect(Collectors.toList());
+            List<Plugin> unmodifiablePlugin = plugins.stream().filter(Plugin::isUnmodifiable).collect(Collectors.toList());
             plugins.clear();
             plugins.addAll(unmodifiablePlugin);
         }
@@ -91,7 +91,7 @@ public class PluginFactory implements DisposableBean {
         }
         Collection<File> files = FileUtils.listFiles(new File(PLUGIN_PATH), new String[]{"jar"}, true);
         if (!CollectionUtils.isEmpty(files)) {
-            files.forEach(f -> loadPlugin(f));
+            files.forEach(this::loadPlugin);
         }
         logger.info("PreLoad plugin:{}", plugins.size());
     }
@@ -135,7 +135,7 @@ public class PluginFactory implements DisposableBean {
     }
 
     public String createPluginId(String pluginClassName, String pluginVersion) {
-        return new StringBuilder(pluginClassName).append("_").append(pluginVersion).toString();
+        return pluginClassName + "_" + pluginVersion;
     }
 
     /**

+ 2 - 71
dbsyncer-plugin/src/main/java/org/dbsyncer/plugin/impl/FullPluginContext.java

@@ -1,87 +1,18 @@
 package org.dbsyncer.plugin.impl;
 
-import org.dbsyncer.sdk.model.Table;
-import org.dbsyncer.sdk.plugin.AbstractPluginContext;
 import org.dbsyncer.sdk.enums.ModelEnum;
-import org.dbsyncer.sdk.plugin.ReaderContext;
-
-import java.util.List;
+import org.dbsyncer.sdk.plugin.AbstractPluginContext;
 
 /**
  * @author AE86
  * @version 1.0.0s
  * @date 2022/6/30 16:04
  */
-public final class FullPluginContext extends AbstractPluginContext implements ReaderContext {
-
-    private Table sourceTable;
-
-    private boolean supportedCursor;
-
-    private List<Object> args;
-
-    private Object[] cursors;
-
-    private int pageIndex;
-
-    private int pageSize;
+public final class FullPluginContext extends AbstractPluginContext {
 
     @Override
     public ModelEnum getModelEnum() {
         return ModelEnum.FULL;
     }
 
-    @Override
-    public Table getSourceTable() {
-        return sourceTable;
-    }
-
-    public void setSourceTable(Table sourceTable) {
-        this.sourceTable = sourceTable;
-    }
-
-    @Override
-    public boolean isSupportedCursor() {
-        return supportedCursor;
-    }
-
-    public void setSupportedCursor(boolean supportedCursor) {
-        this.supportedCursor = supportedCursor;
-    }
-
-    @Override
-    public List<Object> getArgs() {
-        return args;
-    }
-
-    public void setArgs(List<Object> args) {
-        this.args = args;
-    }
-
-    @Override
-    public Object[] getCursors() {
-        return cursors;
-    }
-
-    public void setCursors(Object[] cursors) {
-        this.cursors = cursors;
-    }
-
-    @Override
-    public int getPageIndex() {
-        return pageIndex;
-    }
-
-    public void setPageIndex(int pageIndex) {
-        this.pageIndex = pageIndex;
-    }
-
-    @Override
-    public int getPageSize() {
-        return pageSize;
-    }
-
-    public void setPageSize(int pageSize) {
-        this.pageSize = pageSize;
-    }
 }

+ 27 - 30
dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/config/DDLConfig.java

@@ -1,28 +1,25 @@
 package org.dbsyncer.sdk.config;
 
 import org.dbsyncer.sdk.enums.DDLOperationEnum;
-import org.dbsyncer.sdk.model.Field;
 
+import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 
 public class DDLConfig {
-    /**
-     * 执行命令
-     */
+
     private String sql;
 
     private DDLOperationEnum ddlOperationEnum;
 
-    private List<Field> addFields = new LinkedList<>();
+    private List<String> addedFieldNames = new LinkedList<>();
 
-    private List<Field> removeFields = new LinkedList<>();
+    private List<String> modifiedFieldNames = new LinkedList<>();
 
-    //记录源表的源字段名称
-    private String sourceColumnName;
+    private List<String> droppedFieldNames = new LinkedList<>();
 
-    //记录改变后的字段名称
-    private String changedColumnName;
+    private Map<String, String> changedFieldNames = new LinkedHashMap<>();
 
     public String getSql() {
         return sql;
@@ -32,43 +29,43 @@ public class DDLConfig {
         this.sql = sql;
     }
 
-    public List<Field> getAddFields() {
-        return addFields;
+    public DDLOperationEnum getDdlOperationEnum() {
+        return ddlOperationEnum;
     }
 
-    public void setAddFields(List<Field> addFields) {
-        this.addFields = addFields;
+    public void setDdlOperationEnum(DDLOperationEnum ddlOperationEnum) {
+        this.ddlOperationEnum = ddlOperationEnum;
     }
 
-    public List<Field> getRemoveFields() {
-        return removeFields;
+    public List<String> getAddedFieldNames() {
+        return addedFieldNames;
     }
 
-    public void setRemoveFields(List<Field> removeFields) {
-        this.removeFields = removeFields;
+    public void setAddedFieldNames(List<String> addedFieldNames) {
+        this.addedFieldNames = addedFieldNames;
     }
 
-    public String getSourceColumnName() {
-        return sourceColumnName;
+    public List<String> getModifiedFieldNames() {
+        return modifiedFieldNames;
     }
 
-    public void setSourceColumnName(String sourceColumnName) {
-        this.sourceColumnName = sourceColumnName;
+    public void setModifiedFieldNames(List<String> modifiedFieldNames) {
+        this.modifiedFieldNames = modifiedFieldNames;
     }
 
-    public String getChangedColumnName() {
-        return changedColumnName;
+    public List<String> getDroppedFieldNames() {
+        return droppedFieldNames;
     }
 
-    public void setChangedColumnName(String changedColumnName) {
-        this.changedColumnName = changedColumnName;
+    public void setDroppedFieldNames(List<String> droppedFieldNames) {
+        this.droppedFieldNames = droppedFieldNames;
     }
 
-    public DDLOperationEnum getDdlOperationEnum() {
-        return ddlOperationEnum;
+    public Map<String, String> getChangedFieldNames() {
+        return changedFieldNames;
     }
 
-    public void setDdlOperationEnum(DDLOperationEnum ddlOperationEnum) {
-        this.ddlOperationEnum = ddlOperationEnum;
+    public void setChangedFieldNames(Map<String, String> changedFieldNames) {
+        this.changedFieldNames = changedFieldNames;
     }
 }

+ 3 - 3
dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/config/DatabaseConfig.java

@@ -6,9 +6,9 @@ package org.dbsyncer.sdk.config;
 import org.dbsyncer.sdk.model.ConnectorConfig;
 import org.dbsyncer.sdk.model.SqlTable;
 
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 /**
  * @author AE86
@@ -61,14 +61,14 @@ public class DatabaseConfig extends ConnectorConfig {
     /**
      * 参数配置
      */
-    private Map<String, String> properties = new LinkedHashMap<>();
+    private Map<String, String> properties = new ConcurrentHashMap<>();
 
     public String getProperty(String key) {
         return properties.get(key);
     }
 
     public String getProperty(String key, String defaultValue) {
-        return properties.containsKey(key) ? properties.get(key) : defaultValue;
+        return properties.getOrDefault(key, defaultValue);
     }
 
     public String getDriverClassName() {

+ 1 - 1
dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/config/ListenerConfig.java

@@ -58,7 +58,7 @@ public class ListenerConfig {
     /**
      * 禁用ddl事件
      */
-    private boolean enableDDL = true;
+    private boolean enableDDL;
 
     public ListenerConfig() {
     }

+ 0 - 67
dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/config/WriterBatchConfig.java

@@ -1,67 +0,0 @@
-package org.dbsyncer.sdk.config;
-
-import org.dbsyncer.sdk.model.Field;
-
-import java.util.List;
-import java.util.Map;
-
-public class WriterBatchConfig {
-
-    /**
-     * 表名
-     */
-    private String tableName;
-    /**
-     * 事件
-     */
-    private String event;
-    /**
-     * 执行命令
-     */
-    private Map<String, String> command;
-    /**
-     * 字段信息
-     */
-    private List<Field> fields;
-    /**
-     * 集合数据
-     */
-    private List<Map> data;
-    /**
-     * 覆盖写入
-     */
-    private boolean forceUpdate;
-
-    public WriterBatchConfig(String tableName, String event, Map<String, String> command, List<Field> fields, List<Map> data, boolean forceUpdate) {
-        this.tableName = tableName;
-        this.event = event;
-        this.command = command;
-        this.fields = fields;
-        this.data = data;
-        this.forceUpdate = forceUpdate;
-    }
-
-    public String getTableName() {
-        return tableName;
-    }
-
-    public String getEvent() {
-        return event;
-    }
-
-    public Map<String, String> getCommand() {
-        return command;
-    }
-
-    public List<Field> getFields() {
-        return fields;
-    }
-
-    public List<Map> getData() {
-        return data;
-    }
-
-    public boolean isForceUpdate() {
-        return forceUpdate;
-    }
-}

+ 18 - 31
dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/connector/AbstractConnector.java

@@ -6,7 +6,6 @@ package org.dbsyncer.sdk.connector;
 import org.dbsyncer.common.util.CollectionUtils;
 import org.dbsyncer.common.util.StringUtil;
 import org.dbsyncer.sdk.SdkException;
-import org.dbsyncer.sdk.config.WriterBatchConfig;
 import org.dbsyncer.sdk.connector.schema.BigintValueMapper;
 import org.dbsyncer.sdk.connector.schema.BinaryValueMapper;
 import org.dbsyncer.sdk.connector.schema.BitValueMapper;
@@ -35,19 +34,20 @@ import org.dbsyncer.sdk.connector.schema.VarBinaryValueMapper;
 import org.dbsyncer.sdk.connector.schema.VarcharValueMapper;
 import org.dbsyncer.sdk.constant.ConnectorConstant;
 import org.dbsyncer.sdk.model.Field;
+import org.dbsyncer.sdk.plugin.PluginContext;
 import org.dbsyncer.sdk.schema.SchemaResolver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.sql.Types;
-import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 public abstract class AbstractConnector {
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
-    protected final Map<Integer, ValueMapper> VALUE_MAPPERS = new LinkedHashMap<>();
+    protected final Map<Integer, ValueMapper> VALUE_MAPPERS = new ConcurrentHashMap<>();
 
     public AbstractConnector() {
         // 常用类型
@@ -83,36 +83,21 @@ public abstract class AbstractConnector {
         VALUE_MAPPERS.putIfAbsent(Types.OTHER, new OtherValueMapper());
     }
 
-    /**
-     * 获取标准数据类型解析器
-     *
-     * @return
-     */
-    protected SchemaResolver getSchemaResolver() {
-        return null;
-    }
-
     /**
      * 转换字段值
      *
+     * @param context
      * @param connectorInstance
-     * @param config
      */
-    public void convertProcessBeforeWriter(ConnectorInstance connectorInstance, WriterBatchConfig config) {
-        if (CollectionUtils.isEmpty(config.getFields()) || CollectionUtils.isEmpty(config.getData())) {
-            return;
-        }
-
-        final SchemaResolver resolver = getSchemaResolver();
-        if (resolver != null) {
-            convert(config, resolver);
+    public void convertProcessBeforeWriter(PluginContext context, ConnectorInstance connectorInstance) {
+        if (CollectionUtils.isEmpty(context.getTargetFields()) || CollectionUtils.isEmpty(context.getTargetList())) {
             return;
         }
 
         // 获取字段映射规则
-        for (Map row : config.getData()) {
+        for (Map row : context.getTargetList()) {
             // 根据目标字段类型转换值
-            for (Field f : config.getFields()) {
+            for (Field f : context.getTargetFields()) {
                 if (null == f) {
                     continue;
                 }
@@ -123,7 +108,7 @@ public abstract class AbstractConnector {
                     try {
                         row.put(f.getName(), valueMapper.convertValue(connectorInstance, row.get(f.getName())));
                     } catch (Exception e) {
-                        logger.error("convert value error: ({}, {})", f.getName(), row.get(f.getName()));
+                        logger.error("convert value error: ({}, {}, {})", context.getTargetTableName(), f.getName(), row.get(f.getName()));
                         throw new SdkException(e);
                     }
                 }
@@ -131,18 +116,20 @@ public abstract class AbstractConnector {
         }
     }
 
-    private void convert(WriterBatchConfig config, SchemaResolver resolver) {
-        for (Map row : config.getData()) {
-            for (Field f : config.getFields()) {
+    public void convertProcessBeforeWriter(PluginContext context, SchemaResolver targetResolver) {
+        if (CollectionUtils.isEmpty(context.getTargetFields()) || CollectionUtils.isEmpty(context.getTargetList())) {
+            return;
+        }
+
+        for (Map row : context.getTargetList()) {
+            for (Field f : context.getTargetFields()) {
                 if (null == f) {
                     continue;
                 }
                 try {
-                    // 根据目标字段类型转换值
-                    Object o = resolver.merge(row.get(f.getName()), f);
-                    row.put(f.getName(), resolver.convert(o, f));
+                    row.computeIfPresent(f.getName(), (k, v) -> targetResolver.convert(v, f));
                 } catch (Exception e) {
-                    logger.error(String.format("convert value error: (%s, %s, %s)", config.getTableName(), f.getName(), row.get(f.getName())), e);
+                    logger.error(String.format("convert value error: (%s, %s, %s)", context.getTargetTableName(), f.getName(), row.get(f.getName())), e);
                     throw new SdkException(e);
                 }
             }

+ 1 - 3
dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/connector/database/AbstractDQLConnector.java

@@ -80,9 +80,7 @@ public abstract class AbstractDQLConnector extends AbstractDatabaseConnector {
         map.put(SqlBuilderEnum.QUERY.getName(), getPageSql(pageSql));
 
         // 获取查询总数SQL
-        StringBuilder queryCount = new StringBuilder();
-        queryCount.append("SELECT COUNT(1) FROM (").append(querySql).append(") DBS_T");
-        map.put(SqlBuilderEnum.QUERY_COUNT.getName(), queryCount.toString());
+        map.put(SqlBuilderEnum.QUERY_COUNT.getName(), "SELECT COUNT(1) FROM (" + querySql + ") DBS_T");
         return map;
     }
 

+ 24 - 36
dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/connector/database/AbstractDatabaseConnector.java

@@ -11,6 +11,7 @@ import org.dbsyncer.sdk.config.*;
 import org.dbsyncer.sdk.connector.AbstractConnector;
 import org.dbsyncer.sdk.connector.ConnectorInstance;
 import org.dbsyncer.sdk.connector.database.ds.SimpleConnection;
+import org.dbsyncer.sdk.constant.ConfigConstant;
 import org.dbsyncer.sdk.constant.ConnectorConstant;
 import org.dbsyncer.sdk.constant.DatabaseConstant;
 import org.dbsyncer.sdk.enums.OperationEnum;
@@ -18,6 +19,7 @@ import org.dbsyncer.sdk.enums.QuartzFilterEnum;
 import org.dbsyncer.sdk.enums.SqlBuilderEnum;
 import org.dbsyncer.sdk.enums.TableTypeEnum;
 import org.dbsyncer.sdk.model.*;
+import org.dbsyncer.sdk.plugin.PluginContext;
 import org.dbsyncer.sdk.plugin.ReaderContext;
 import org.dbsyncer.sdk.spi.ConnectorService;
 import org.dbsyncer.sdk.util.DatabaseUtil;
@@ -50,21 +52,6 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector implem
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
-    /**
-     * 系统函数表达式$convert()$
-     */
-    private final String SYS_EXPRESSION = "^[$].*[$]$";
-
-    @Override
-    public boolean isSupportedTiming() {
-        return true;
-    }
-
-    @Override
-    public boolean isSupportedLog() {
-        return true;
-    }
-
     @Override
     public Class<DatabaseConfig> getConfigClass() {
         return DatabaseConfig.class;
@@ -163,14 +150,15 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector implem
     }
 
     @Override
-    public Result writer(DatabaseConnectorInstance connectorInstance, WriterBatchConfig config) {
-        String event = config.getEvent();
-        List<Map> data = config.getData();
+    public Result writer(DatabaseConnectorInstance connectorInstance, PluginContext context) {
+        String event = context.getEvent();
+        List<Map> data = context.getTargetList();
+        List<Field> targetFields = context.getTargetFields();
 
         // 1、获取SQL
-        String executeSql = config.getCommand().get(event);
+        String executeSql = context.getCommand().get(event);
         Assert.hasText(executeSql, "执行SQL语句不能为空.");
-        if (CollectionUtils.isEmpty(config.getFields())) {
+        if (CollectionUtils.isEmpty(targetFields)) {
             logger.error("writer fields can not be empty.");
             throw new SdkException("writer fields can not be empty.");
         }
@@ -178,8 +166,8 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector implem
             logger.error("writer data can not be empty.");
             throw new SdkException("writer data can not be empty.");
         }
-        List<Field> fields = new ArrayList<>(config.getFields());
-        List<Field> pkFields = PrimaryKeyUtil.findConfigPrimaryKeyFields(config);
+        List<Field> fields = new ArrayList<>(targetFields);
+        List<Field> pkFields = PrimaryKeyUtil.findExistPrimaryKeyFields(targetFields);
         // Update / Delete
         if (!isInsert(event)) {
             if (isDelete(event)) {
@@ -196,8 +184,8 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector implem
             // 2、设置参数
             execute = connectorInstance.execute(databaseTemplate -> databaseTemplate.batchUpdate(executeSql, batchRows(fields, data)));
         } catch (Exception e) {
-            if (config.isForceUpdate()) {
-                data.forEach(row -> forceUpdate(result, connectorInstance, config, pkFields, row));
+            if (context.isForceUpdate()) {
+                data.forEach(row -> forceUpdate(result, connectorInstance, context, pkFields, row));
             }
         }
 
@@ -208,8 +196,8 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector implem
                     result.getSuccessData().add(data.get(i));
                     continue;
                 }
-                if (config.isForceUpdate()) {
-                    forceUpdate(result, connectorInstance, config, pkFields, data.get(i));
+                if (context.isForceUpdate()) {
+                    forceUpdate(result, connectorInstance, context, pkFields, data.get(i));
                 }
             }
         }
@@ -524,7 +512,7 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector implem
             // 排除定时表达式
             if (QuartzFilterEnum.getQuartzFilterEnum(value) == null) {
                 // 系统函数表达式 $select max(update_time)$
-                Matcher matcher = Pattern.compile(SYS_EXPRESSION).matcher(value);
+                Matcher matcher = Pattern.compile("^[$].*[$]$").matcher(value);
                 if (matcher.find()) {
                     return StringUtil.substring(value, 1, value.length() - 1);
                 }
@@ -582,11 +570,11 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector implem
         return args;
     }
 
-    private void forceUpdate(Result result, DatabaseConnectorInstance connectorInstance, WriterBatchConfig config, List<Field> pkFields,
+    private void forceUpdate(Result result, DatabaseConnectorInstance connectorInstance, PluginContext context, List<Field> pkFields,
                              Map row) {
-        if (isUpdate(config.getEvent()) || isInsert(config.getEvent())) {
+        if (isUpdate(context.getEvent()) || isInsert(context.getEvent())) {
             // 存在执行覆盖更新,否则写入
-            final String queryCount = config.getCommand().get(ConnectorConstant.OPERTION_QUERY_EXIST);
+            final String queryCount = context.getCommand().get(ConnectorConstant.OPERTION_QUERY_EXIST);
             int size = pkFields.size();
             Object[] args = new Object[size];
             for (int i = 0; i < size; i++) {
@@ -594,17 +582,17 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector implem
             }
             final String event = existRow(connectorInstance, queryCount, args) ? ConnectorConstant.OPERTION_UPDATE
                     : ConnectorConstant.OPERTION_INSERT;
-            logger.warn("{}表执行{}失败, 重新执行{}, {}", config.getTableName(), config.getEvent(), event, row);
-            writer(result, connectorInstance, config, pkFields, row, event);
+            logger.warn("{}表执行{}失败, 重新执行{}, {}", context.getTargetTableName(), context.getEvent(), event, row);
+            writer(result, connectorInstance, context, pkFields, row, event);
         }
     }
 
-    private void writer(Result result, DatabaseConnectorInstance connectorInstance, WriterBatchConfig config, List<Field> pkFields, Map row,
+    private void writer(Result result, DatabaseConnectorInstance connectorInstance, PluginContext context, List<Field> pkFields, Map row,
                         String event) {
         // 1、获取 SQL
-        String sql = config.getCommand().get(event);
+        String sql = context.getCommand().get(event);
 
-        List<Field> fields = new ArrayList<>(config.getFields());
+        List<Field> fields = new ArrayList<>(context.getTargetFields());
         // Update / Delete
         if (!isInsert(event)) {
             if (isDelete(event)) {
@@ -674,7 +662,7 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector implem
                 return true;
             });
             Map<String, String> successMap = new HashMap<>();
-            successMap.put("sql", config.getSql());
+            successMap.put(ConfigConstant.BINLOG_DATA, config.getSql());
             result.addSuccessData(Collections.singletonList(successMap));
         } catch (Exception e) {
             result.getError().append(String.format("执行ddl: %s, 异常:%s", config.getSql(), e.getMessage()));

+ 3 - 7
dbsyncer-sdk/src/main/java/org/dbsyncer/sdk/connector/database/sqlbuilder/SqlBuilderQuery.java

@@ -42,13 +42,9 @@ public class SqlBuilderQuery extends AbstractSqlBuilder {
         Field field = null;
         for (int i = 0; i < size; i++) {
             field = fields.get(i);
-            if (field.isUnmodifiabled()) {
-                sql.append(field.getName());
-            } else {
-                sql.append(quotation);
-                sql.append(database.buildFieldName(field));
-                sql.append(quotation);
-            }
+            sql.append(quotation);
+            sql.append(database.buildFieldName(field));
+            sql.append(quotation);
 
             // "USERNAME" as "myName"
             if (StringUtil.isNotBlank(field.getLabelName())) {

Some files were not shown because too many files changed in this diff