
优化增量写入 (Optimize incremental writes)

AE86 committed 3 years ago

commit da357f8d2c

+ 2 - 2
dbsyncer-parser/src/main/java/org/dbsyncer/parser/ParserFactory.java

@@ -19,7 +19,7 @@ import org.dbsyncer.parser.enums.ParserEnum;
 import org.dbsyncer.parser.event.FullRefreshEvent;
 import org.dbsyncer.parser.flush.BufferActuator;
 import org.dbsyncer.parser.flush.FlushStrategy;
-import org.dbsyncer.parser.flush.model.WriterBufferTask;
+import org.dbsyncer.parser.flush.model.WriterRequest;
 import org.dbsyncer.parser.logger.LogService;
 import org.dbsyncer.parser.logger.LogType;
 import org.dbsyncer.parser.model.*;
@@ -330,7 +330,7 @@ public class ParserFactory implements Parser {
         pluginFactory.convert(tableGroup.getPlugin(), event, data, target);
 
         // 4、写入缓冲执行器
-        writerBufferActuator.offer(new WriterBufferTask(metaId, tableGroup.getId(), event, tConnectorMapper, picker.getTargetFields(), tableGroup.getCommand(), target));
+        writerBufferActuator.offer(new WriterRequest(metaId, tableGroup.getId(), event, tConnectorMapper, picker.getTargetFields(), tableGroup.getCommand(), target));
     }
 
     /**

+ 30 - 21
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/AbstractBufferActuator.java

@@ -2,6 +2,8 @@ package org.dbsyncer.parser.flush;
 
 import org.dbsyncer.common.scheduled.ScheduledTaskJob;
 import org.dbsyncer.common.scheduled.ScheduledTaskService;
+import org.dbsyncer.parser.flush.model.AbstractRequest;
+import org.dbsyncer.parser.flush.model.AbstractResponse;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -18,16 +20,16 @@ import java.util.concurrent.ConcurrentLinkedQueue;
  * @version 1.0.0
  * @date 2022/3/27 17:36
  */
-public abstract class AbstractBufferActuator<B, F> implements BufferActuator, ScheduledTaskJob {
+public abstract class AbstractBufferActuator<Request, Response> implements BufferActuator, ScheduledTaskJob {
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
     @Autowired
     private ScheduledTaskService scheduledTaskService;
 
-    private Queue<B> buffer = new ConcurrentLinkedQueue();
+    private Queue<Request> buffer = new ConcurrentLinkedQueue();
 
-    private Queue<B> temp = new ConcurrentLinkedQueue();
+    private Queue<Request> temp = new ConcurrentLinkedQueue();
 
     private final Object LOCK = new Object();
 
@@ -35,46 +37,53 @@ public abstract class AbstractBufferActuator<B, F> implements BufferActuator, Sc
 
     @PostConstruct
     private void init() {
-        scheduledTaskService.start(300, this);
+        scheduledTaskService.start(getPeriod(), this);
     }
 
+    /**
+     * 获取定时间隔(毫秒)
+     *
+     * @return
+     */
+    protected abstract long getPeriod();
+
     /**
      * 生成缓存value
      *
      * @return
      */
-    protected abstract AbstractFlushTask getValue();
+    protected abstract AbstractResponse getValue();
 
     /**
      * 生成分区key
      *
-     * @param bufferTask
+     * @param request
      * @return
      */
-    protected abstract String getPartitionKey(B bufferTask);
+    protected abstract String getPartitionKey(Request request);
 
     /**
      * 分区
      *
-     * @param bufferTask
-     * @param flushTask
+     * @param request
+     * @param response
      */
-    protected abstract void partition(B bufferTask, F flushTask);
+    protected abstract void partition(Request request, Response response);
 
     /**
      * 异步批处理
      *
-     * @param flushTask
+     * @param response
      */
-    protected abstract void flush(F flushTask);
+    protected abstract void flush(Response response);
 
     @Override
-    public void offer(AbstractBufferTask task) {
+    public void offer(AbstractRequest task) {
         if (running) {
-            temp.offer((B) task);
+            temp.offer((Request) task);
             return;
         }
-        buffer.offer((B) task);
+        buffer.offer((Request) task);
     }
 
     @Override
@@ -93,26 +102,26 @@ public abstract class AbstractBufferActuator<B, F> implements BufferActuator, Sc
         }
     }
 
-    private void flush(Queue<B> queue) {
+    private void flush(Queue<Request> queue) {
         if (!queue.isEmpty()) {
-            final Map<String, AbstractFlushTask> map = new LinkedHashMap<>();
+            final Map<String, AbstractResponse> map = new LinkedHashMap<>();
             while (!queue.isEmpty()) {
-                B poll = queue.poll();
+                Request poll = queue.poll();
                 String key = getPartitionKey(poll);
                 if (!map.containsKey(key)) {
                     map.putIfAbsent(key, getValue());
                 }
-                partition(poll, (F) map.get(key));
+                partition(poll, (Response) map.get(key));
             }
 
             map.forEach((key, flushTask) -> {
                 long now = Instant.now().toEpochMilli();
                 try {
-                    flush((F) flushTask);
+                    flush((Response) flushTask);
                 } catch (Exception e) {
                     logger.error("[{}]-flush异常{}", key);
                 }
-                logger.info("[{}]-flush{}条,耗时{}秒", key, flushTask.getFlushTaskSize(), (Instant.now().toEpochMilli() - now) / 1000);
+                logger.info("[{}]-flush{}条,耗时{}秒", key, flushTask.getTaskSize(), (Instant.now().toEpochMilli() - now) / 1000);
             });
             map.clear();
         }
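The refactor above turns AbstractBufferActuator into a generic Request/Response template: a subclass only supplies the flush period, a response factory, a partition key, the per-request partition step, and the batch flush. Below is a minimal sketch of such a subclass, assuming the dbsyncer-parser classes from this commit are on the classpath; DemoRequest, DemoResponse and DemoBufferActuator are hypothetical names used only for illustration, the real implementations added in this commit being FlushBufferActuator and WriterBufferActuator.

package org.dbsyncer.parser.flush.impl;

import org.dbsyncer.parser.flush.AbstractBufferActuator;
import org.dbsyncer.parser.flush.model.AbstractRequest;
import org.dbsyncer.parser.flush.model.AbstractResponse;
import org.springframework.stereotype.Component;

import java.util.LinkedList;
import java.util.List;

// Hypothetical request/response pair, mirroring FlushRequest/FlushResponse below.
class DemoRequest extends AbstractRequest {
    private final String key;
    private final String payload;

    DemoRequest(String key, String payload) {
        this.key = key;
        this.payload = payload;
    }

    String getKey() { return key; }

    String getPayload() { return payload; }
}

class DemoResponse extends AbstractResponse {
    private final List<String> payloads = new LinkedList<>();

    List<String> getPayloads() { return payloads; }

    @Override
    public int getTaskSize() { return payloads.size(); }
}

// Hypothetical actuator: callers offer(DemoRequest) and the scheduled run()
// drains the queue, groups requests by partition key and flushes each group.
@Component
public class DemoBufferActuator extends AbstractBufferActuator<DemoRequest, DemoResponse> {

    @Override
    protected long getPeriod() {
        return 1000; // poll interval in milliseconds
    }

    @Override
    protected AbstractResponse getValue() {
        return new DemoResponse();
    }

    @Override
    protected String getPartitionKey(DemoRequest request) {
        return request.getKey();
    }

    @Override
    protected void partition(DemoRequest request, DemoResponse response) {
        response.getPayloads().add(request.getPayload());
    }

    @Override
    protected void flush(DemoResponse response) {
        // batch write goes here; one call per partition key per period
        System.out.println("flush " + response.getTaskSize() + " items");
    }
}

Callers would obtain the actuator as a BufferActuator bean and call offer(new DemoRequest(...)); the scheduled run() then drains the queue, groups requests by partition key and flushes each group in one batch, which is exactly how the two concrete actuators in this commit are wired.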

+ 0 - 10
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/AbstractBufferTask.java

@@ -1,10 +0,0 @@
-package org.dbsyncer.parser.flush;
-
-/**
- * @author AE86
- * @version 1.0.0
- * @date 2022/3/27 16:57
- */
-public abstract class AbstractBufferTask {
-
-}

+ 3 - 1
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/BufferActuator.java

@@ -1,5 +1,7 @@
 package org.dbsyncer.parser.flush;
 
+import org.dbsyncer.parser.flush.model.AbstractRequest;
+
 /**
  * @author AE86
  * @version 1.0.0
@@ -7,6 +9,6 @@ package org.dbsyncer.parser.flush;
  */
 public interface BufferActuator {
 
-    void offer(AbstractBufferTask task);
+    void offer(AbstractRequest task);
 
 }

+ 48 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/FlushBufferActuator.java

@@ -0,0 +1,48 @@
+package org.dbsyncer.parser.flush.impl;
+
+import org.dbsyncer.parser.flush.AbstractBufferActuator;
+import org.dbsyncer.parser.flush.model.AbstractResponse;
+import org.dbsyncer.parser.flush.model.FlushRequest;
+import org.dbsyncer.parser.flush.model.FlushResponse;
+import org.dbsyncer.storage.StorageService;
+import org.dbsyncer.storage.enums.StorageEnum;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/3/27 16:50
+ */
+@Component
+public class FlushBufferActuator extends AbstractBufferActuator<FlushRequest, FlushResponse> {
+
+    @Autowired
+    private StorageService storageService;
+
+    @Override
+    protected long getPeriod() {
+        return 3000;
+    }
+
+    @Override
+    protected AbstractResponse getValue() {
+        return new FlushResponse();
+    }
+
+    @Override
+    protected String getPartitionKey(FlushRequest bufferTask) {
+        return bufferTask.getMetaId();
+    }
+
+    @Override
+    protected void partition(FlushRequest request, FlushResponse response) {
+        response.setMetaId(request.getMetaId());
+        response.getDataList().addAll(request.getList());
+    }
+
+    @Override
+    protected void flush(FlushResponse response) {
+        storageService.addData(StorageEnum.DATA, response.getMetaId(), response.getDataList());
+    }
+}

+ 8 - 87
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/FlushServiceImpl.java

@@ -1,10 +1,10 @@
 package org.dbsyncer.parser.flush.impl;
 
 import com.alibaba.fastjson.JSONException;
-import org.dbsyncer.common.scheduled.ScheduledTaskJob;
-import org.dbsyncer.common.scheduled.ScheduledTaskService;
 import org.dbsyncer.common.util.JsonUtil;
+import org.dbsyncer.parser.flush.BufferActuator;
 import org.dbsyncer.parser.flush.FlushService;
+import org.dbsyncer.parser.flush.model.FlushRequest;
 import org.dbsyncer.storage.SnowflakeIdWorker;
 import org.dbsyncer.storage.StorageService;
 import org.dbsyncer.storage.constant.ConfigConstant;
@@ -15,12 +15,10 @@ import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
-import javax.annotation.PostConstruct;
 import java.time.Instant;
-import java.util.*;
-import java.util.concurrent.ConcurrentLinkedQueue;
-import java.util.concurrent.Executor;
-import java.util.concurrent.TimeUnit;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.stream.Collectors;
 
@@ -34,7 +32,7 @@ import java.util.stream.Collectors;
  * @date 2020/05/19 18:38
  */
 @Component
-public class FlushServiceImpl implements FlushService, ScheduledTaskJob {
+public class FlushServiceImpl implements FlushService {
 
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
@@ -45,23 +43,7 @@ public class FlushServiceImpl implements FlushService, ScheduledTaskJob {
     private SnowflakeIdWorker snowflakeIdWorker;
 
     @Autowired
-    private ScheduledTaskService scheduledTaskService;
-
-    @Autowired
-    private Executor taskExecutor;
-
-    private Queue<Task> buffer = new ConcurrentLinkedQueue();
-
-    private Queue<Task> temp = new ConcurrentLinkedQueue();
-
-    private final Object LOCK = new Object();
-
-    private volatile boolean running;
-
-    @PostConstruct
-    private void init() {
-        scheduledTaskService.start("*/3 * * * * ?", this);
-    }
+    private BufferActuator flushBufferActuator;
 
     @Override
     public void asyncWrite(String type, String error) {
@@ -94,68 +76,7 @@ public class FlushServiceImpl implements FlushService, ScheduledTaskJob {
             return params;
         }).collect(Collectors.toList());
 
-        if (running) {
-            temp.offer(new Task(metaId, list));
-            return;
-        }
-
-        buffer.offer(new Task(metaId, list));
-    }
-
-    @Override
-    public void run() {
-        if (running) {
-            return;
-        }
-        synchronized (LOCK) {
-            if (running) {
-                return;
-            }
-            running = true;
-            flush(buffer);
-            running = false;
-            try {
-                TimeUnit.MILLISECONDS.sleep(10);
-            } catch (InterruptedException e) {
-                logger.error(e.getMessage());
-            }
-            flush(temp);
-        }
-    }
-
-    private void flush(Queue<Task> buffer) {
-        if (!buffer.isEmpty()) {
-            final Map<String, List<Map>> task = new LinkedHashMap<>();
-            while (!buffer.isEmpty()) {
-                Task t = buffer.poll();
-                if (!task.containsKey(t.metaId)) {
-                    task.putIfAbsent(t.metaId, new LinkedList<>());
-                }
-                task.get(t.metaId).addAll(t.list);
-            }
-            task.forEach((metaId, list) -> {
-                taskExecutor.execute(() -> {
-                    long now = Instant.now().toEpochMilli();
-                    try {
-                        storageService.addData(StorageEnum.DATA, metaId, list);
-                    } catch (Exception e) {
-                        logger.error("[{}]-flushData异常{}", metaId, list.size());
-                    }
-                    logger.info("[{}]-flushData{}条,耗时{}秒", metaId, list.size(), (Instant.now().toEpochMilli() - now) / 1000);
-                });
-            });
-            task.clear();
-        }
-    }
-
-    final class Task {
-        String metaId;
-        List<Map> list;
-
-        public Task(String metaId, List<Map> list) {
-            this.metaId = metaId;
-            this.list = list;
-        }
+        flushBufferActuator.offer(new FlushRequest(metaId, list));
     }
 
 }

+ 25 - 20
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/WriterBufferActuator.java

@@ -3,10 +3,10 @@ package org.dbsyncer.parser.flush.impl;
 import org.dbsyncer.common.model.Result;
 import org.dbsyncer.parser.ParserFactory;
 import org.dbsyncer.parser.flush.AbstractBufferActuator;
-import org.dbsyncer.parser.flush.AbstractFlushTask;
+import org.dbsyncer.parser.flush.model.AbstractResponse;
 import org.dbsyncer.parser.flush.FlushStrategy;
-import org.dbsyncer.parser.flush.model.WriterBufferTask;
-import org.dbsyncer.parser.flush.model.WriterFlushTask;
+import org.dbsyncer.parser.flush.model.WriterRequest;
+import org.dbsyncer.parser.flush.model.WriterResponse;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
@@ -18,7 +18,7 @@ import java.util.Collections;
  * @date 2022/3/27 16:50
  */
 @Component
-public class WriterBufferActuator extends AbstractBufferActuator<WriterBufferTask, WriterFlushTask> {
+public class WriterBufferActuator extends AbstractBufferActuator<WriterRequest, WriterResponse> {
 
     @Autowired
     private ParserFactory parserFactory;
@@ -29,32 +29,37 @@ public class WriterBufferActuator extends AbstractBufferActuator<WriterBufferTas
     private final static int BATCH_SIZE = 100;
 
     @Override
-    protected AbstractFlushTask getValue() {
-        return new WriterFlushTask();
+    protected long getPeriod() {
+        return 300;
     }
 
     @Override
-    protected String getPartitionKey(WriterBufferTask bufferTask) {
-        return new StringBuilder(bufferTask.getTableGroupId()).append(bufferTask.getEvent()).toString();
+    protected AbstractResponse getValue() {
+        return new WriterResponse();
     }
 
     @Override
-    protected void partition(WriterBufferTask bufferTask, WriterFlushTask flushTask) {
-        flushTask.getDataList().add(bufferTask.getRow());
-        if (flushTask.isMerged()) {
+    protected String getPartitionKey(WriterRequest request) {
+        return new StringBuilder(request.getTableGroupId()).append("-").append(request.getEvent()).toString();
+    }
+
+    @Override
+    protected void partition(WriterRequest request, WriterResponse response) {
+        response.getDataList().add(request.getRow());
+        if (response.isMerged()) {
             return;
         }
-        flushTask.setMetaId(bufferTask.getMetaId());
-        flushTask.setEvent(bufferTask.getEvent());
-        flushTask.setConnectorMapper(bufferTask.getConnectorMapper());
-        flushTask.setFields(Collections.unmodifiableList(bufferTask.getFields()));
-        flushTask.setCommand(bufferTask.getCommand());
-        flushTask.setMerged(true);
+        response.setMetaId(request.getMetaId());
+        response.setEvent(request.getEvent());
+        response.setConnectorMapper(request.getConnectorMapper());
+        response.setFields(Collections.unmodifiableList(request.getFields()));
+        response.setCommand(request.getCommand());
+        response.setMerged(true);
     }
 
     @Override
-    protected void flush(WriterFlushTask flushTask) {
-        Result result = parserFactory.writeBatch(flushTask.getConnectorMapper(), flushTask.getCommand(), flushTask.getEvent(), flushTask.getFields(), flushTask.getDataList(), BATCH_SIZE);
-        flushStrategy.flushIncrementData(flushTask.getMetaId(), result, flushTask.getEvent(), flushTask.getDataList());
+    protected void flush(WriterResponse response) {
+        Result result = parserFactory.writeBatch(response.getConnectorMapper(), response.getCommand(), response.getEvent(), response.getFields(), response.getDataList(), BATCH_SIZE);
+        flushStrategy.flushIncrementData(response.getMetaId(), result, response.getEvent(), response.getDataList());
     }
 }
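Besides the Request/Response rename, WriterBufferActuator now reports its own 300 ms flush period through the new getPeriod() hook, and its partition key joins the table group id and the event with a "-" separator instead of concatenating them directly. A tiny standalone illustration of the key change follows; the id and event values are made up for the example.

public class WriterPartitionKeyDemo {
    public static void main(String[] args) {
        String tableGroupId = "tableGroup1"; // hypothetical id
        String event = "UPDATE";             // hypothetical event

        // before: getPartitionKey(WriterBufferTask) concatenated the two values
        String oldKey = new StringBuilder(tableGroupId).append(event).toString();
        // after: getPartitionKey(WriterRequest) inserts a "-" between them
        String newKey = new StringBuilder(tableGroupId).append("-").append(event).toString();

        System.out.println(oldKey); // tableGroup1UPDATE
        System.out.println(newKey); // tableGroup1-UPDATE
    }
}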

+ 10 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/model/AbstractRequest.java

@@ -0,0 +1,10 @@
+package org.dbsyncer.parser.flush.model;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/3/27 16:57
+ */
+public abstract class AbstractRequest {
+
+}

+ 3 - 3
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/AbstractFlushTask.java → dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/model/AbstractResponse.java

@@ -1,17 +1,17 @@
-package org.dbsyncer.parser.flush;
+package org.dbsyncer.parser.flush.model;
 
 /**
  * @author AE86
  * @version 1.0.0
  * @date 2022/3/27 18:11
  */
-public abstract class AbstractFlushTask {
+public abstract class AbstractResponse {
 
     /**
      * 获取批处理数
      *
      * @return
      */
-    public abstract int getFlushTaskSize();
+    public abstract int getTaskSize();
 
 }

+ 29 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/model/FlushRequest.java

@@ -0,0 +1,29 @@
+package org.dbsyncer.parser.flush.model;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/3/27 16:57
+ */
+public class FlushRequest extends AbstractRequest {
+
+    private String metaId;
+
+    private List<Map> list;
+
+    public FlushRequest(String metaId, List<Map> list) {
+        this.metaId = metaId;
+        this.list = list;
+    }
+
+    public String getMetaId() {
+        return metaId;
+    }
+
+    public List<Map> getList() {
+        return list;
+    }
+}

+ 37 - 0
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/model/FlushResponse.java

@@ -0,0 +1,37 @@
+package org.dbsyncer.parser.flush.model;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @author AE86
+ * @version 1.0.0
+ * @date 2022/3/27 16:57
+ */
+public class FlushResponse extends AbstractResponse {
+
+    private String metaId;
+    private List<Map> dataList = new LinkedList<>();
+
+    public String getMetaId() {
+        return metaId;
+    }
+
+    public void setMetaId(String metaId) {
+        this.metaId = metaId;
+    }
+
+    public List<Map> getDataList() {
+        return dataList;
+    }
+
+    public void setDataList(List<Map> dataList) {
+        this.dataList = dataList;
+    }
+
+    @Override
+    public int getTaskSize() {
+        return dataList.size();
+    }
+}

+ 2 - 3
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/model/WriterBufferTask.java → dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/model/WriterRequest.java

@@ -2,7 +2,6 @@ package org.dbsyncer.parser.flush.model;
 
 import org.dbsyncer.connector.ConnectorMapper;
 import org.dbsyncer.connector.config.Field;
-import org.dbsyncer.parser.flush.AbstractBufferTask;
 
 import java.util.List;
 import java.util.Map;
@@ -12,7 +11,7 @@ import java.util.Map;
  * @version 1.0.0
  * @date 2022/3/27 16:57
  */
-public class WriterBufferTask extends AbstractBufferTask {
+public class WriterRequest extends AbstractRequest {
 
     private String metaId;
 
@@ -28,7 +27,7 @@
 
     private Map row;
 
-    public WriterBufferTask(String metaId, String tableGroupId, String event, ConnectorMapper connectorMapper, List<Field> fields, Map<String, String> command, Map row) {
+    public WriterRequest(String metaId, String tableGroupId, String event, ConnectorMapper connectorMapper, List<Field> fields, Map<String, String> command, Map row) {
         this.metaId = metaId;
         this.tableGroupId = tableGroupId;
         this.event = event;

+ 2 - 3
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/model/WriterFlushTask.java → dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/model/WriterResponse.java

@@ -2,7 +2,6 @@ package org.dbsyncer.parser.flush.model;
 
 import org.dbsyncer.connector.ConnectorMapper;
 import org.dbsyncer.connector.config.Field;
-import org.dbsyncer.parser.flush.AbstractFlushTask;
 
 import java.util.LinkedList;
 import java.util.List;
@@ -13,7 +12,7 @@ import java.util.Map;
  * @version 1.0.0
  * @date 2022/3/27 18:11
  */
-public class WriterFlushTask extends AbstractFlushTask {
+public class WriterResponse extends AbstractResponse {
 
     private boolean isMerged;
 
@@ -86,7 +85,7 @@ public class WriterFlushTask extends AbstractFlushTask {
     }
 
     @Override
-    public int getFlushTaskSize() {
+    public int getTaskSize() {
         return dataList.size();
     }