
move package

AE86 · 3 years ago
parent commit c360f74225

+ 4 - 4
dbsyncer-connector/src/main/java/org/dbsyncer/connector/database/AbstractDatabaseConnector.java

@@ -106,9 +106,9 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector implem
         List<Field> fields = config.getFields();
         List<Map> data = config.getData();
 
-        // 1. Get the INSERT SQL
-        String insertSql = config.getCommand().get(SqlBuilderEnum.INSERT.getName());
-        Assert.hasText(insertSql, "Insert SQL must not be empty.");
+        // 1. Get the SQL for the current event
+        String executeSql = config.getCommand().get(config.getEvent());
+        Assert.hasText(executeSql, "The SQL to execute must not be empty.");
         if (CollectionUtils.isEmpty(fields)) {
             logger.error("writer fields can not be empty.");
             throw new ConnectorException("writer fields can not be empty.");
@@ -124,7 +124,7 @@ public abstract class AbstractDatabaseConnector extends AbstractConnector implem
         try {
             // 2. Set the parameters
             connectorMapper.execute(databaseTemplate -> {
-                databaseTemplate.batchUpdate(insertSql, new BatchPreparedStatementSetter() {
+                databaseTemplate.batchUpdate(executeSql, new BatchPreparedStatementSetter() {
                     @Override
                     public void setValues(PreparedStatement preparedStatement, int i) {
                         batchRowsSetter(databaseTemplate.getConnection(), preparedStatement, fields, fSize, data.get(i));
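
Note: this hunk generalizes doWriter from INSERT-only to event-driven SQL lookup — the command map is now read with config.getEvent() as the key, so the same write path can serve INSERT, UPDATE, and DELETE batches. A minimal sketch of the assumed lookup, outside the diff (event names and SQL strings are illustrative, not taken from the repo):

    import java.util.HashMap;
    import java.util.Map;

    // Sketch only: the command map is assumed to hold one prepared statement
    // per event name; doWriter picks the one matching config.getEvent().
    public class CommandLookupSketch {
        public static void main(String[] args) {
            Map<String, String> command = new HashMap<>();
            command.put("INSERT", "INSERT INTO t (id, name) VALUES (?, ?)"); // illustrative SQL
            command.put("UPDATE", "UPDATE t SET name = ? WHERE id = ?");
            command.put("DELETE", "DELETE FROM t WHERE id = ?");

            String event = "UPDATE";                // stands in for config.getEvent()
            String executeSql = command.get(event); // event-keyed lookup, as in the hunk
            if (executeSql == null || executeSql.trim().isEmpty()) {
                throw new IllegalArgumentException("The SQL to execute must not be empty.");
            }
            System.out.println(executeSql);
        }
    }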

+ 1 - 1
dbsyncer-parser/src/main/java/org/dbsyncer/parser/ParserFactory.java

@@ -20,7 +20,7 @@ import org.dbsyncer.parser.event.FullRefreshEvent;
 import org.dbsyncer.parser.logger.LogService;
 import org.dbsyncer.parser.logger.LogType;
 import org.dbsyncer.parser.model.*;
-import org.dbsyncer.parser.strategy.FlushStrategy;
+import org.dbsyncer.parser.flush.FlushStrategy;
 import org.dbsyncer.parser.util.ConvertUtil;
 import org.dbsyncer.parser.util.PickerUtil;
 import org.dbsyncer.plugin.PluginFactory;

+ 2 - 2
dbsyncer-parser/src/main/java/org/dbsyncer/parser/config/ParserFlushStrategyConfiguration.java

@@ -1,7 +1,7 @@
 package org.dbsyncer.parser.config;
 
-import org.dbsyncer.parser.strategy.FlushStrategy;
-import org.dbsyncer.parser.strategy.impl.DisableFullFlushStrategy;
+import org.dbsyncer.parser.flush.FlushStrategy;
+import org.dbsyncer.parser.flush.impl.DisableFullFlushStrategy;
 import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;

+ 1 - 2
dbsyncer-parser/src/main/java/org/dbsyncer/parser/strategy/AbstractFlushStrategy.java → dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/AbstractFlushStrategy.java

@@ -1,8 +1,7 @@
-package org.dbsyncer.parser.strategy;
+package org.dbsyncer.parser.flush;
 
 import org.dbsyncer.cache.CacheService;
 import org.dbsyncer.common.model.Result;
-import org.dbsyncer.parser.flush.FlushService;
 import org.dbsyncer.parser.model.Meta;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.util.Assert;

+ 1 - 1
dbsyncer-parser/src/main/java/org/dbsyncer/parser/strategy/FlushStrategy.java → dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/FlushStrategy.java

@@ -1,4 +1,4 @@
-package org.dbsyncer.parser.strategy;
+package org.dbsyncer.parser.flush;
 
 import org.dbsyncer.common.model.Result;
 

+ 2 - 2
dbsyncer-parser/src/main/java/org/dbsyncer/parser/strategy/impl/DisableFullFlushStrategy.java → dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/DisableFullFlushStrategy.java

@@ -1,9 +1,9 @@
-package org.dbsyncer.parser.strategy.impl;
+package org.dbsyncer.parser.flush.impl;
 
 import org.dbsyncer.common.model.Result;
+import org.dbsyncer.parser.flush.AbstractFlushStrategy;
 import org.dbsyncer.parser.logger.LogService;
 import org.dbsyncer.parser.logger.LogType;
-import org.dbsyncer.parser.strategy.AbstractFlushStrategy;
 import org.springframework.beans.factory.annotation.Autowired;
 
 import java.util.List;

+ 2 - 2
dbsyncer-parser/src/main/java/org/dbsyncer/parser/strategy/impl/EnableFlushStrategy.java → dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/EnableFlushStrategy.java

@@ -1,6 +1,6 @@
-package org.dbsyncer.parser.strategy.impl;
+package org.dbsyncer.parser.flush.impl;
 
-import org.dbsyncer.parser.strategy.AbstractFlushStrategy;
+import org.dbsyncer.parser.flush.AbstractFlushStrategy;
 import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.stereotype.Component;
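
Note: taken together, the two strategy hunks above appear to use standard Spring Boot conditional wiring: EnableFlushStrategy registers behind a property switch (@ConditionalOnProperty), while ParserFlushStrategyConfiguration supplies DisableFullFlushStrategy as the fallback via @ConditionalOnMissingBean. A hedged sketch of that wiring — the property name is a guess, not read from the repo:

    import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
    import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.stereotype.Component;

    interface FlushStrategy { /* stands in for org.dbsyncer.parser.flush.FlushStrategy */ }

    // Wins when the (hypothetical) property is set to true.
    @Component
    @ConditionalOnProperty(value = "dbsyncer.parser.flush.full.enabled", havingValue = "true")
    class EnableFlushStrategy implements FlushStrategy { }

    // Fallback configuration: creates a bean only if none exists yet.
    @Configuration
    class ParserFlushStrategyConfiguration {
        @Bean
        @ConditionalOnMissingBean(FlushStrategy.class)
        public FlushStrategy disableFullFlushStrategy() {
            return new FlushStrategy() { }; // stands in for DisableFullFlushStrategy
        }
    }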
 

+ 160 - 159
dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/FlushServiceImpl.java → dbsyncer-parser/src/main/java/org/dbsyncer/parser/flush/impl/FlushServiceImpl.java

@@ -1,160 +1,161 @@
-package org.dbsyncer.parser.flush;
-
-import com.alibaba.fastjson.JSONException;
-import org.dbsyncer.common.scheduled.ScheduledTaskJob;
-import org.dbsyncer.common.scheduled.ScheduledTaskService;
-import org.dbsyncer.common.util.JsonUtil;
-import org.dbsyncer.storage.SnowflakeIdWorker;
-import org.dbsyncer.storage.StorageService;
-import org.dbsyncer.storage.constant.ConfigConstant;
-import org.dbsyncer.storage.enums.StorageDataStatusEnum;
-import org.dbsyncer.storage.enums.StorageEnum;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-
-import javax.annotation.PostConstruct;
-import java.time.Instant;
-import java.util.*;
-import java.util.concurrent.ConcurrentLinkedQueue;
-import java.util.concurrent.Executor;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.stream.Collectors;
-
-/**
- * Persistence
- * <p>Full or incremental data</p>
- * <p>System logs</p>
- *
- * @author AE86
- * @version 1.0.0
- * @date 2020/05/19 18:38
- */
-@Component
-public class FlushServiceImpl implements FlushService, ScheduledTaskJob {
-
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
-    @Autowired
-    private StorageService storageService;
-
-    @Autowired
-    private SnowflakeIdWorker snowflakeIdWorker;
-
-    @Autowired
-    private ScheduledTaskService scheduledTaskService;
-
-    @Autowired
-    private Executor taskExecutor;
-
-    private Queue<Task> buffer = new ConcurrentLinkedQueue();
-
-    private Queue<Task> temp = new ConcurrentLinkedQueue();
-
-    private final Object LOCK = new Object();
-
-    private volatile boolean running;
-
-    @PostConstruct
-    private void init() {
-        scheduledTaskService.start("*/3 * * * * ?", this);
-    }
-
-    @Override
-    public void asyncWrite(String type, String error) {
-        Map<String, Object> params = new HashMap();
-        params.put(ConfigConstant.CONFIG_MODEL_ID, String.valueOf(snowflakeIdWorker.nextId()));
-        params.put(ConfigConstant.CONFIG_MODEL_TYPE, type);
-        params.put(ConfigConstant.CONFIG_MODEL_JSON, error);
-        params.put(ConfigConstant.CONFIG_MODEL_CREATE_TIME, Instant.now().toEpochMilli());
-        storageService.addLog(StorageEnum.LOG, params);
-    }
-
-    @Override
-    public void asyncWrite(String metaId, String event, boolean success, List<Map> data, String error) {
-        long now = Instant.now().toEpochMilli();
-        AtomicBoolean added = new AtomicBoolean(false);
-        List<Map> list = data.parallelStream().map(r -> {
-            Map<String, Object> params = new HashMap();
-            params.put(ConfigConstant.CONFIG_MODEL_ID, String.valueOf(snowflakeIdWorker.nextId()));
-            params.put(ConfigConstant.DATA_SUCCESS, success ? StorageDataStatusEnum.SUCCESS.getValue() : StorageDataStatusEnum.FAIL.getValue());
-            params.put(ConfigConstant.DATA_EVENT, event);
-            params.put(ConfigConstant.DATA_ERROR, added.get() ? "" : error);
-            try {
-                params.put(ConfigConstant.CONFIG_MODEL_JSON, JsonUtil.objToJson(r));
-            } catch (JSONException e) {
-                logger.warn("Row may contain a Blob or InputStream large-object type that cannot be serialized: {}", r);
-                params.put(ConfigConstant.CONFIG_MODEL_JSON, r.toString());
-            }
-            params.put(ConfigConstant.CONFIG_MODEL_CREATE_TIME, now);
-            added.set(true);
-            return params;
-        }).collect(Collectors.toList());
-
-        if (running) {
-            temp.offer(new Task(metaId, list));
-            return;
-        }
-
-        buffer.offer(new Task(metaId, list));
-    }
-
-    @Override
-    public void run() {
-        if (running) {
-            return;
-        }
-        synchronized (LOCK) {
-            if (running) {
-                return;
-            }
-            running = true;
-            flush(buffer);
-            running = false;
-            try {
-                TimeUnit.MILLISECONDS.sleep(10);
-            } catch (InterruptedException e) {
-                logger.error(e.getMessage());
-            }
-            flush(temp);
-        }
-    }
-
-    private void flush(Queue<Task> buffer) {
-        if (!buffer.isEmpty()) {
-            final Map<String, List<Map>> task = new LinkedHashMap<>();
-            while (!buffer.isEmpty()) {
-                Task t = buffer.poll();
-                if (!task.containsKey(t.metaId)) {
-                    task.putIfAbsent(t.metaId, new LinkedList<>());
-                }
-                task.get(t.metaId).addAll(t.list);
-            }
-            task.forEach((metaId, list) -> {
-                taskExecutor.execute(() -> {
-                    long now = Instant.now().toEpochMilli();
-                    try {
-                        storageService.addData(StorageEnum.DATA, metaId, list);
-                    } catch (Exception e) {
-                        logger.error("[{}]-flush exception, {} rows", metaId, list.size());
-                    }
-                    logger.info("[{}]-flushed {} rows in {} seconds", metaId, list.size(), (Instant.now().toEpochMilli() - now) / 1000);
-                });
-            });
-            task.clear();
-        }
-    }
-
-    final class Task {
-        String metaId;
-        List<Map> list;
-
-        public Task(String metaId, List<Map> list) {
-            this.metaId = metaId;
-            this.list = list;
-        }
-    }
-
+package org.dbsyncer.parser.flush.impl;
+
+import com.alibaba.fastjson.JSONException;
+import org.dbsyncer.common.scheduled.ScheduledTaskJob;
+import org.dbsyncer.common.scheduled.ScheduledTaskService;
+import org.dbsyncer.common.util.JsonUtil;
+import org.dbsyncer.parser.flush.FlushService;
+import org.dbsyncer.storage.SnowflakeIdWorker;
+import org.dbsyncer.storage.StorageService;
+import org.dbsyncer.storage.constant.ConfigConstant;
+import org.dbsyncer.storage.enums.StorageDataStatusEnum;
+import org.dbsyncer.storage.enums.StorageEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PostConstruct;
+import java.time.Instant;
+import java.util.*;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.Executor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.stream.Collectors;
+
+/**
+ * Persistence
+ * <p>Full or incremental data</p>
+ * <p>System logs</p>
+ *
+ * @author AE86
+ * @version 1.0.0
+ * @date 2020/05/19 18:38
+ */
+@Component
+public class FlushServiceImpl implements FlushService, ScheduledTaskJob {
+
+    private final Logger logger = LoggerFactory.getLogger(getClass());
+
+    @Autowired
+    private StorageService storageService;
+
+    @Autowired
+    private SnowflakeIdWorker snowflakeIdWorker;
+
+    @Autowired
+    private ScheduledTaskService scheduledTaskService;
+
+    @Autowired
+    private Executor taskExecutor;
+
+    private Queue<Task> buffer = new ConcurrentLinkedQueue();
+
+    private Queue<Task> temp = new ConcurrentLinkedQueue();
+
+    private final Object LOCK = new Object();
+
+    private volatile boolean running;
+
+    @PostConstruct
+    private void init() {
+        scheduledTaskService.start("*/3 * * * * ?", this);
+    }
+
+    @Override
+    public void asyncWrite(String type, String error) {
+        Map<String, Object> params = new HashMap();
+        params.put(ConfigConstant.CONFIG_MODEL_ID, String.valueOf(snowflakeIdWorker.nextId()));
+        params.put(ConfigConstant.CONFIG_MODEL_TYPE, type);
+        params.put(ConfigConstant.CONFIG_MODEL_JSON, error);
+        params.put(ConfigConstant.CONFIG_MODEL_CREATE_TIME, Instant.now().toEpochMilli());
+        storageService.addLog(StorageEnum.LOG, params);
+    }
+
+    @Override
+    public void asyncWrite(String metaId, String event, boolean success, List<Map> data, String error) {
+        long now = Instant.now().toEpochMilli();
+        AtomicBoolean added = new AtomicBoolean(false);
+        List<Map> list = data.parallelStream().map(r -> {
+            Map<String, Object> params = new HashMap();
+            params.put(ConfigConstant.CONFIG_MODEL_ID, String.valueOf(snowflakeIdWorker.nextId()));
+            params.put(ConfigConstant.DATA_SUCCESS, success ? StorageDataStatusEnum.SUCCESS.getValue() : StorageDataStatusEnum.FAIL.getValue());
+            params.put(ConfigConstant.DATA_EVENT, event);
+            params.put(ConfigConstant.DATA_ERROR, added.get() ? "" : error);
+            try {
+                params.put(ConfigConstant.CONFIG_MODEL_JSON, JsonUtil.objToJson(r));
+            } catch (JSONException e) {
+                logger.warn("Row may contain a Blob or InputStream large-object type that cannot be serialized: {}", r);
+                params.put(ConfigConstant.CONFIG_MODEL_JSON, r.toString());
+            }
+            params.put(ConfigConstant.CONFIG_MODEL_CREATE_TIME, now);
+            added.set(true);
+            return params;
+        }).collect(Collectors.toList());
+
+        if (running) {
+            temp.offer(new Task(metaId, list));
+            return;
+        }
+
+        buffer.offer(new Task(metaId, list));
+    }
+
+    @Override
+    public void run() {
+        if (running) {
+            return;
+        }
+        synchronized (LOCK) {
+            if (running) {
+                return;
+            }
+            running = true;
+            flush(buffer);
+            running = false;
+            try {
+                TimeUnit.MILLISECONDS.sleep(10);
+            } catch (InterruptedException e) {
+                logger.error(e.getMessage());
+            }
+            flush(temp);
+        }
+    }
+
+    private void flush(Queue<Task> buffer) {
+        if (!buffer.isEmpty()) {
+            final Map<String, List<Map>> task = new LinkedHashMap<>();
+            while (!buffer.isEmpty()) {
+                Task t = buffer.poll();
+                if (!task.containsKey(t.metaId)) {
+                    task.putIfAbsent(t.metaId, new LinkedList<>());
+                }
+                task.get(t.metaId).addAll(t.list);
+            }
+            task.forEach((metaId, list) -> {
+                taskExecutor.execute(() -> {
+                    long now = Instant.now().toEpochMilli();
+                    try {
+                        storageService.addData(StorageEnum.DATA, metaId, list);
+                    } catch (Exception e) {
+                        logger.error("[{}]-flush exception, {} rows", metaId, list.size());
+                    }
+                    logger.info("[{}]-flushed {} rows in {} seconds", metaId, list.size(), (Instant.now().toEpochMilli() - now) / 1000);
+                });
+            });
+            task.clear();
+        }
+    }
+
+    final class Task {
+        String metaId;
+        List<Map> list;
+
+        public Task(String metaId, List<Map> list) {
+            this.metaId = metaId;
+            this.list = list;
+        }
+    }
+
 }
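
Note: the relocated FlushServiceImpl is a double-buffered batch writer. Producers offer into buffer, but divert to temp while a drain is in progress; the cron job ("*/3 * * * * ?", i.e. every 3 seconds) drains buffer under a lock and then sweeps temp. A stand-alone sketch of just that pattern, with scheduling and storage replaced by stand-ins:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Queue;
    import java.util.concurrent.ConcurrentLinkedQueue;

    // Sketch of the double-buffer pattern used by FlushServiceImpl: writes
    // never block on a flush, they just land in the side queue instead.
    public class DoubleBufferSketch {
        private final Queue<String> buffer = new ConcurrentLinkedQueue<>();
        private final Queue<String> temp = new ConcurrentLinkedQueue<>();
        private final Object lock = new Object();
        private volatile boolean running;

        public void write(String item) {
            // Divert to the side queue while the main queue is being drained.
            (running ? temp : buffer).offer(item);
        }

        // Invoked periodically (the real class schedules this via cron).
        public void run() {
            if (running) {
                return; // a drain is already in progress
            }
            synchronized (lock) {
                if (running) {
                    return;
                }
                running = true;
                drain(buffer);
                running = false;
                drain(temp); // pick up items diverted during the drain
            }
        }

        private void drain(Queue<String> queue) {
            List<String> batch = new ArrayList<>();
            String item;
            while ((item = queue.poll()) != null) {
                batch.add(item);
            }
            if (!batch.isEmpty()) {
                System.out.println("flushed " + batch.size() + " items");
            }
        }

        public static void main(String[] args) {
            DoubleBufferSketch sketch = new DoubleBufferSketch();
            sketch.write("a");
            sketch.write("b");
            sketch.run();
        }
    }

One consequence worth noting: an item diverted to temp after the final sweep waits until the next tick, so the pattern trades a small flush delay for never blocking producers.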