浏览代码

add module dbsyncer-connector-kafka

AE86 1 年之前
父节点
当前提交
e489185c90
共有 15 个文件被更改,包括 512 次插入,474 次删除
  1. 0 0
      dbsyncer-connector/dbsyncer-connector-elasticsearch/src/test/java/ESClientTest.java
  2. 42 0
      dbsyncer-connector/dbsyncer-connector-kafka/pom.xml
  3. 105 98
      dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/KafkaClient.java
  4. 155 155
      dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/KafkaConnector.java
  5. 12 2
      dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/KafkaConnectorInstance.java
  6. 146 142
      dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/config/KafkaConfig.java
  7. 8 6
      dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/enums/KafkaFieldTypeEnum.java
  8. 6 3
      dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/serialization/JsonToMapDeserializer.java
  9. 6 3
      dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/serialization/MapToJsonSerializer.java
  10. 12 2
      dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/util/KafkaUtil.java
  11. 3 3
      dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/validator/KafkaConfigValidator.java
  12. 1 0
      dbsyncer-connector/dbsyncer-connector-kafka/src/main/resources/META-INF/services/org.dbsyncer.sdk.spi.ConnectorService
  13. 7 3
      dbsyncer-connector/dbsyncer-connector-kafka/src/test/java/KafkaClientTest.java
  14. 9 7
      dbsyncer-connector/pom.xml
  15. 0 50
      dbsyncer-connector/src/main/java/org/dbsyncer/connector/config/FileConfig.java

+ 0 - 0
dbsyncer-connector/dbsyncer-connector-elasticsearch/src/test/ESClientTest.java → dbsyncer-connector/dbsyncer-connector-elasticsearch/src/test/java/ESClientTest.java


+ 42 - 0
dbsyncer-connector/dbsyncer-connector-kafka/pom.xml

@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>dbsyncer-connector</artifactId>
+        <groupId>org.ghi</groupId>
+        <version>2.0.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>dbsyncer-connector-kafka</artifactId>
+
+    <dependencies>
+        <!-- sdk -->
+        <dependency>
+            <groupId>org.ghi</groupId>
+            <artifactId>dbsyncer-sdk</artifactId>
+            <version>${project.parent.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <!-- kafka -->
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-log4j2</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+    </dependencies>
+</project>

+ 105 - 98
dbsyncer-connector/src/main/java/org/dbsyncer/connector/kafka/KafkaClient.java → dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/KafkaClient.java

@@ -1,99 +1,106 @@
-package org.dbsyncer.connector.kafka;
-
-import org.apache.kafka.clients.CommonClientConfigs;
-import org.apache.kafka.clients.NetworkClient;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.Node;
-import org.dbsyncer.connector.ConnectorException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.lang.reflect.Field;
-import java.net.InetSocketAddress;
-import java.net.Socket;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Kafka客户端,集成消费者、生产者API
- */
-public class KafkaClient {
-
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
-    private KafkaConsumer consumer;
-    private KafkaProducer producer;
-    private NetworkClient networkClient;
-
-    public KafkaClient(KafkaConsumer consumer, KafkaProducer producer) {
-        this.consumer = consumer;
-        this.producer = producer;
-    }
-
-    public boolean ping() {
-        if (null == networkClient) {
-            synchronized (this) {
-                if (null == networkClient) {
-                    try {
-                        networkClient = (NetworkClient) invoke(invoke(consumer, "client"), "client");
-                    } catch (NoSuchFieldException e) {
-                        logger.error(e.getMessage());
-                    } catch (IllegalAccessException e) {
-                        logger.error(e.getMessage());
-                    }
-                }
-            }
-        }
-        final Node node = networkClient.leastLoadedNode(0);
-        telnet(node.host(), node.port(), 5000);
-        return true;
-    }
-
-    private boolean telnet(String host, int port, int timeout) {
-        Socket socket = new Socket();
-        try {
-            socket.connect(new InetSocketAddress(host, port), timeout);
-            return socket.isConnected();
-        } catch (IOException e) {
-            throw new ConnectorException(String.format("DNS resolution failed for url in %s %s:%s", CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, host, port));
-        } finally {
-            try {
-                socket.close();
-            } catch (IOException e) {
-                // nothing to do
-            }
-        }
-    }
-
-    private Object invoke(Object obj, String declaredFieldName) throws NoSuchFieldException, IllegalAccessException {
-        final Field field = obj.getClass().getDeclaredField(declaredFieldName);
-        field.setAccessible(true);
-        return field.get(obj);
-    }
-
-    public void close() {
-        if (null != producer) {
-            producer.close();
-        }
-        if (null != consumer) {
-            consumer.close();
-        }
-    }
-
-    public void subscribe(List<String> topics) {
-        consumer.subscribe(topics);
-    }
-
-    public ConsumerRecords<String, Object> poll(long timeout) {
-        return consumer.poll(timeout);
-    }
-
-    public void send(String topic, String key, Map<String, Object> map) {
-        producer.send(new ProducerRecord<>(topic, key, map));
-    }
-
+/**
+ * DBSyncer Copyright 2020-2023 All Rights Reserved.
+ */
+package org.dbsyncer.connector.kafka;
+
+import org.apache.kafka.clients.CommonClientConfigs;
+import org.apache.kafka.clients.NetworkClient;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.KafkaException;
+import org.apache.kafka.common.Node;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Kafka客户端,集成消费者、生产者API
+ *
+ * @Author AE86
+ * @Version 1.0.0
+ * @Date 2021-12-16 23:09
+ */
+public class KafkaClient {
+
+    private final Logger logger = LoggerFactory.getLogger(getClass());
+
+    private KafkaConsumer consumer;
+    private KafkaProducer producer;
+    private NetworkClient networkClient;
+
+    public KafkaClient(KafkaConsumer consumer, KafkaProducer producer) {
+        this.consumer = consumer;
+        this.producer = producer;
+    }
+
+    public boolean ping() {
+        if (null == networkClient) {
+            synchronized (this) {
+                if (null == networkClient) {
+                    try {
+                        networkClient = (NetworkClient) invoke(invoke(consumer, "client"), "client");
+                    } catch (NoSuchFieldException e) {
+                        logger.error(e.getMessage());
+                    } catch (IllegalAccessException e) {
+                        logger.error(e.getMessage());
+                    }
+                }
+            }
+        }
+        final Node node = networkClient.leastLoadedNode(0);
+        telnet(node.host(), node.port(), 5000);
+        return true;
+    }
+
+    private boolean telnet(String host, int port, int timeout) {
+        Socket socket = new Socket();
+        try {
+            socket.connect(new InetSocketAddress(host, port), timeout);
+            return socket.isConnected();
+        } catch (IOException e) {
+            throw new KafkaException(String.format("DNS resolution failed for url in %s %s:%s", CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, host, port));
+        } finally {
+            try {
+                socket.close();
+            } catch (IOException e) {
+                // nothing to do
+            }
+        }
+    }
+
+    private Object invoke(Object obj, String declaredFieldName) throws NoSuchFieldException, IllegalAccessException {
+        final Field field = obj.getClass().getDeclaredField(declaredFieldName);
+        field.setAccessible(true);
+        return field.get(obj);
+    }
+
+    public void close() {
+        if (null != producer) {
+            producer.close();
+        }
+        if (null != consumer) {
+            consumer.close();
+        }
+    }
+
+    public void subscribe(List<String> topics) {
+        consumer.subscribe(topics);
+    }
+
+    public ConsumerRecords<String, Object> poll(long timeout) {
+        return consumer.poll(timeout);
+    }
+
+    public void send(String topic, String key, Map<String, Object> map) {
+        producer.send(new ProducerRecord<>(topic, key, map));
+    }
+
 }

+ 155 - 155
dbsyncer-connector/src/main/java/org/dbsyncer/connector/kafka/KafkaConnector.java → dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/KafkaConnector.java

@@ -1,156 +1,156 @@
-/**
- * DBSyncer Copyright 2020-2023 All Rights Reserved.
- */
-package org.dbsyncer.connector.kafka;
-
-import org.dbsyncer.common.model.Result;
-import org.dbsyncer.common.util.CollectionUtils;
-import org.dbsyncer.common.util.JsonUtil;
-import org.dbsyncer.connector.ConnectorException;
-import org.dbsyncer.connector.config.KafkaConfig;
-import org.dbsyncer.sdk.config.CommandConfig;
-import org.dbsyncer.sdk.config.ReaderConfig;
-import org.dbsyncer.sdk.config.WriterBatchConfig;
-import org.dbsyncer.sdk.connector.AbstractConnector;
-import org.dbsyncer.sdk.connector.ConfigValidator;
-import org.dbsyncer.sdk.connector.ConnectorInstance;
-import org.dbsyncer.sdk.listener.Listener;
-import org.dbsyncer.sdk.model.Field;
-import org.dbsyncer.sdk.model.MetaInfo;
-import org.dbsyncer.sdk.model.Table;
-import org.dbsyncer.sdk.spi.ConnectorService;
-import org.dbsyncer.sdk.util.PrimaryKeyUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Component;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Kafka连接器实现
- *
- * @Author AE86
- * @Version 1.0.0
- * @Date 2021-11-22 23:55
- */
-@Component
-public class KafkaConnector extends AbstractConnector implements ConnectorService<KafkaConnectorInstance, KafkaConfig> {
-
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
-    private final String TYPE = "Kafka";
-
-    @Override
-    public String getConnectorType() {
-        return TYPE;
-    }
-
-    @Override
-    public boolean isSupportedTiming() {
-        return false;
-    }
-
-    @Override
-    public boolean isSupportedLog() {
-        return false;
-    }
-
-    @Override
-    public Class<KafkaConfig> getConfigClass() {
-        return KafkaConfig.class;
-    }
-
-    @Override
-    public ConnectorInstance connect(KafkaConfig config) {
-        return new KafkaConnectorInstance(config);
-    }
-
-    @Override
-    public ConfigValidator getConfigValidator() {
-        return null;
-    }
-
-    @Override
-    public void disconnect(KafkaConnectorInstance connectorInstance) {
-        connectorInstance.close();
-    }
-
-    @Override
-    public boolean isAlive(KafkaConnectorInstance connectorInstance) {
-        return connectorInstance.getConnection().ping();
-    }
-
-    @Override
-    public String getConnectorInstanceCacheKey(KafkaConfig config) {
-        return String.format("%s-%s-%s-%s", config.getConnectorType(), config.getBootstrapServers(), config.getTopic(), config.getGroupId());
-    }
-
-    @Override
-    public List<Table> getTable(KafkaConnectorInstance connectorInstance) {
-        List<Table> topics = new ArrayList<>();
-        topics.add(new Table(connectorInstance.getConfig().getTopic()));
-        return topics;
-    }
-
-    @Override
-    public MetaInfo getMetaInfo(KafkaConnectorInstance connectorInstance, String tableName) {
-        KafkaConfig config = connectorInstance.getConfig();
-        List<Field> fields = JsonUtil.jsonToArray(config.getFields(), Field.class);
-        return new MetaInfo().setColumn(fields);
-    }
-
-    @Override
-    public long getCount(KafkaConnectorInstance connectorInstance, Map<String, String> command) {
-        return 0;
-    }
-
-    @Override
-    public Result reader(KafkaConnectorInstance connectorInstance, ReaderConfig config) {
-        throw new ConnectorException("Full synchronization is not supported");
-    }
-
-    @Override
-    public Result writer(KafkaConnectorInstance connectorInstance, WriterBatchConfig config) {
-        List<Map> data = config.getData();
-        if (CollectionUtils.isEmpty(data) || CollectionUtils.isEmpty(config.getFields())) {
-            logger.error("writer data can not be empty.");
-            throw new ConnectorException("writer data can not be empty.");
-        }
-
-        Result result = new Result();
-        final KafkaConfig cfg = connectorInstance.getConfig();
-        final List<Field> pkFields = PrimaryKeyUtil.findConfigPrimaryKeyFields(config);
-        try {
-            String topic = cfg.getTopic();
-            // 默认取第一个主键
-            final String pk = pkFields.get(0).getName();
-            data.forEach(row -> connectorInstance.getConnection().send(topic, String.valueOf(row.get(pk)), row));
-            result.addSuccessData(data);
-        } catch (Exception e) {
-            // 记录错误数据
-            result.addFailData(data);
-            result.getError().append(e.getMessage()).append(System.lineSeparator());
-            logger.error(e.getMessage());
-        }
-        return result;
-    }
-
-    @Override
-    public Map<String, String> getSourceCommand(CommandConfig commandConfig) {
-        return Collections.EMPTY_MAP;
-    }
-
-    @Override
-    public Map<String, String> getTargetCommand(CommandConfig commandConfig) {
-        return Collections.EMPTY_MAP;
-    }
-
-    @Override
-    public Listener getListener(String listenerType) {
-        return null;
-    }
-
+/**
+ * DBSyncer Copyright 2020-2023 All Rights Reserved.
+ */
+package org.dbsyncer.connector.kafka;
+
+import org.apache.kafka.common.KafkaException;
+import org.dbsyncer.common.model.Result;
+import org.dbsyncer.common.util.CollectionUtils;
+import org.dbsyncer.common.util.JsonUtil;
+import org.dbsyncer.connector.kafka.config.KafkaConfig;
+import org.dbsyncer.connector.kafka.validator.KafkaConfigValidator;
+import org.dbsyncer.sdk.config.CommandConfig;
+import org.dbsyncer.sdk.config.ReaderConfig;
+import org.dbsyncer.sdk.config.WriterBatchConfig;
+import org.dbsyncer.sdk.connector.AbstractConnector;
+import org.dbsyncer.sdk.connector.ConfigValidator;
+import org.dbsyncer.sdk.connector.ConnectorInstance;
+import org.dbsyncer.sdk.listener.Listener;
+import org.dbsyncer.sdk.model.Field;
+import org.dbsyncer.sdk.model.MetaInfo;
+import org.dbsyncer.sdk.model.Table;
+import org.dbsyncer.sdk.spi.ConnectorService;
+import org.dbsyncer.sdk.util.PrimaryKeyUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Kafka连接器实现
+ *
+ * @Author AE86
+ * @Version 1.0.0
+ * @Date 2021-11-22 23:55
+ */
+public class KafkaConnector extends AbstractConnector implements ConnectorService<KafkaConnectorInstance, KafkaConfig> {
+
+    private final Logger logger = LoggerFactory.getLogger(getClass());
+
+    private final String TYPE = "Kafka";
+    private final KafkaConfigValidator configValidator = new KafkaConfigValidator();
+
+    @Override
+    public String getConnectorType() {
+        return TYPE;
+    }
+
+    @Override
+    public boolean isSupportedTiming() {
+        return false;
+    }
+
+    @Override
+    public boolean isSupportedLog() {
+        return false;
+    }
+
+    @Override
+    public Class<KafkaConfig> getConfigClass() {
+        return KafkaConfig.class;
+    }
+
+    @Override
+    public ConnectorInstance connect(KafkaConfig config) {
+        return new KafkaConnectorInstance(config);
+    }
+
+    @Override
+    public ConfigValidator getConfigValidator() {
+        return configValidator;
+    }
+
+    @Override
+    public void disconnect(KafkaConnectorInstance connectorInstance) {
+        connectorInstance.close();
+    }
+
+    @Override
+    public boolean isAlive(KafkaConnectorInstance connectorInstance) {
+        return connectorInstance.getConnection().ping();
+    }
+
+    @Override
+    public String getConnectorInstanceCacheKey(KafkaConfig config) {
+        return String.format("%s-%s-%s-%s", config.getConnectorType(), config.getBootstrapServers(), config.getTopic(), config.getGroupId());
+    }
+
+    @Override
+    public List<Table> getTable(KafkaConnectorInstance connectorInstance) {
+        List<Table> topics = new ArrayList<>();
+        topics.add(new Table(connectorInstance.getConfig().getTopic()));
+        return topics;
+    }
+
+    @Override
+    public MetaInfo getMetaInfo(KafkaConnectorInstance connectorInstance, String tableName) {
+        KafkaConfig config = connectorInstance.getConfig();
+        List<Field> fields = JsonUtil.jsonToArray(config.getFields(), Field.class);
+        return new MetaInfo().setColumn(fields);
+    }
+
+    @Override
+    public long getCount(KafkaConnectorInstance connectorInstance, Map<String, String> command) {
+        return 0;
+    }
+
+    @Override
+    public Result reader(KafkaConnectorInstance connectorInstance, ReaderConfig config) {
+        throw new KafkaException("Full synchronization is not supported");
+    }
+
+    @Override
+    public Result writer(KafkaConnectorInstance connectorInstance, WriterBatchConfig config) {
+        List<Map> data = config.getData();
+        if (CollectionUtils.isEmpty(data) || CollectionUtils.isEmpty(config.getFields())) {
+            logger.error("writer data can not be empty.");
+            throw new KafkaException("writer data can not be empty.");
+        }
+
+        Result result = new Result();
+        final KafkaConfig cfg = connectorInstance.getConfig();
+        final List<Field> pkFields = PrimaryKeyUtil.findConfigPrimaryKeyFields(config);
+        try {
+            String topic = cfg.getTopic();
+            // 默认取第一个主键
+            final String pk = pkFields.get(0).getName();
+            data.forEach(row -> connectorInstance.getConnection().send(topic, String.valueOf(row.get(pk)), row));
+            result.addSuccessData(data);
+        } catch (Exception e) {
+            // 记录错误数据
+            result.addFailData(data);
+            result.getError().append(e.getMessage()).append(System.lineSeparator());
+            logger.error(e.getMessage());
+        }
+        return result;
+    }
+
+    @Override
+    public Map<String, String> getSourceCommand(CommandConfig commandConfig) {
+        return Collections.EMPTY_MAP;
+    }
+
+    @Override
+    public Map<String, String> getTargetCommand(CommandConfig commandConfig) {
+        return Collections.EMPTY_MAP;
+    }
+
+    @Override
+    public Listener getListener(String listenerType) {
+        return null;
+    }
+
 }

+ 12 - 2
dbsyncer-connector/src/main/java/org/dbsyncer/connector/kafka/KafkaConnectorInstance.java → dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/KafkaConnectorInstance.java

@@ -1,9 +1,19 @@
+/**
+ * DBSyncer Copyright 2020-2023 All Rights Reserved.
+ */
 package org.dbsyncer.connector.kafka;
 
-import org.dbsyncer.connector.config.KafkaConfig;
-import org.dbsyncer.connector.util.KafkaUtil;
+import org.dbsyncer.connector.kafka.config.KafkaConfig;
+import org.dbsyncer.connector.kafka.util.KafkaUtil;
 import org.dbsyncer.sdk.connector.ConnectorInstance;
 
+/**
+ * Kafka连接器实例
+ *
+ * @Author AE86
+ * @Version 1.0.0
+ * @Date 2021-11-22 23:55
+ */
 public final class KafkaConnectorInstance implements ConnectorInstance<KafkaConfig, KafkaClient> {
     private KafkaConfig config;
     private KafkaClient client;

+ 146 - 142
dbsyncer-connector/src/main/java/org/dbsyncer/connector/config/KafkaConfig.java → dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/config/KafkaConfig.java

@@ -1,143 +1,147 @@
-package org.dbsyncer.connector.config;
-
-import org.dbsyncer.sdk.model.ConnectorConfig;
-
-/**
- * @author AE86
- * @ClassName: KafkaConfig
- * @Description: Kafka连接配置
- * @date: 2021年11月4日 下午8:00:00
- */
-public class KafkaConfig extends ConnectorConfig {
-
-    private String bootstrapServers;
-    private String topic;
-    private String fields;
-
-    // 消费者
-    private String deserializer;
-    private String groupId;
-    private int sessionTimeoutMs;
-    private int maxPartitionFetchBytes;
-
-    // 生产者
-    private String serializer;
-    private int bufferMemory;
-    private int batchSize;
-    private int lingerMs;
-    private String acks;
-    private int retries;
-    private int maxRequestSize;
-
-    public String getBootstrapServers() {
-        return bootstrapServers;
-    }
-
-    public void setBootstrapServers(String bootstrapServers) {
-        this.bootstrapServers = bootstrapServers;
-    }
-
-    public String getTopic() {
-        return topic;
-    }
-
-    public void setTopic(String topic) {
-        this.topic = topic;
-    }
-
-    public String getFields() {
-        return fields;
-    }
-
-    public void setFields(String fields) {
-        this.fields = fields;
-    }
-
-    public String getDeserializer() {
-        return deserializer;
-    }
-
-    public void setDeserializer(String deserializer) {
-        this.deserializer = deserializer;
-    }
-
-    public String getGroupId() {
-        return groupId;
-    }
-
-    public void setGroupId(String groupId) {
-        this.groupId = groupId;
-    }
-
-    public int getSessionTimeoutMs() {
-        return sessionTimeoutMs;
-    }
-
-    public void setSessionTimeoutMs(int sessionTimeoutMs) {
-        this.sessionTimeoutMs = sessionTimeoutMs;
-    }
-
-    public int getMaxPartitionFetchBytes() {
-        return maxPartitionFetchBytes;
-    }
-
-    public void setMaxPartitionFetchBytes(int maxPartitionFetchBytes) {
-        this.maxPartitionFetchBytes = maxPartitionFetchBytes;
-    }
-
-    public String getSerializer() {
-        return serializer;
-    }
-
-    public void setSerializer(String serializer) {
-        this.serializer = serializer;
-    }
-
-    public int getBufferMemory() {
-        return bufferMemory;
-    }
-
-    public void setBufferMemory(int bufferMemory) {
-        this.bufferMemory = bufferMemory;
-    }
-
-    public int getBatchSize() {
-        return batchSize;
-    }
-
-    public void setBatchSize(int batchSize) {
-        this.batchSize = batchSize;
-    }
-
-    public int getLingerMs() {
-        return lingerMs;
-    }
-
-    public void setLingerMs(int lingerMs) {
-        this.lingerMs = lingerMs;
-    }
-
-    public String getAcks() {
-        return acks;
-    }
-
-    public void setAcks(String acks) {
-        this.acks = acks;
-    }
-
-    public int getRetries() {
-        return retries;
-    }
-
-    public void setRetries(int retries) {
-        this.retries = retries;
-    }
-
-    public int getMaxRequestSize() {
-        return maxRequestSize;
-    }
-
-    public void setMaxRequestSize(int maxRequestSize) {
-        this.maxRequestSize = maxRequestSize;
-    }
+/**
+ * DBSyncer Copyright 2020-2023 All Rights Reserved.
+ */
+package org.dbsyncer.connector.kafka.config;
+
+import org.dbsyncer.sdk.model.ConnectorConfig;
+
+/**
+ * Kafka连接配置
+ *
+ * @Author AE86
+ * @Version 1.0.0
+ * @Date 2021-11-04 20:10
+ */
+public class KafkaConfig extends ConnectorConfig {
+
+    private String bootstrapServers;
+    private String topic;
+    private String fields;
+
+    // 消费者
+    private String deserializer;
+    private String groupId;
+    private int sessionTimeoutMs;
+    private int maxPartitionFetchBytes;
+
+    // 生产者
+    private String serializer;
+    private int bufferMemory;
+    private int batchSize;
+    private int lingerMs;
+    private String acks;
+    private int retries;
+    private int maxRequestSize;
+
+    public String getBootstrapServers() {
+        return bootstrapServers;
+    }
+
+    public void setBootstrapServers(String bootstrapServers) {
+        this.bootstrapServers = bootstrapServers;
+    }
+
+    public String getTopic() {
+        return topic;
+    }
+
+    public void setTopic(String topic) {
+        this.topic = topic;
+    }
+
+    public String getFields() {
+        return fields;
+    }
+
+    public void setFields(String fields) {
+        this.fields = fields;
+    }
+
+    public String getDeserializer() {
+        return deserializer;
+    }
+
+    public void setDeserializer(String deserializer) {
+        this.deserializer = deserializer;
+    }
+
+    public String getGroupId() {
+        return groupId;
+    }
+
+    public void setGroupId(String groupId) {
+        this.groupId = groupId;
+    }
+
+    public int getSessionTimeoutMs() {
+        return sessionTimeoutMs;
+    }
+
+    public void setSessionTimeoutMs(int sessionTimeoutMs) {
+        this.sessionTimeoutMs = sessionTimeoutMs;
+    }
+
+    public int getMaxPartitionFetchBytes() {
+        return maxPartitionFetchBytes;
+    }
+
+    public void setMaxPartitionFetchBytes(int maxPartitionFetchBytes) {
+        this.maxPartitionFetchBytes = maxPartitionFetchBytes;
+    }
+
+    public String getSerializer() {
+        return serializer;
+    }
+
+    public void setSerializer(String serializer) {
+        this.serializer = serializer;
+    }
+
+    public int getBufferMemory() {
+        return bufferMemory;
+    }
+
+    public void setBufferMemory(int bufferMemory) {
+        this.bufferMemory = bufferMemory;
+    }
+
+    public int getBatchSize() {
+        return batchSize;
+    }
+
+    public void setBatchSize(int batchSize) {
+        this.batchSize = batchSize;
+    }
+
+    public int getLingerMs() {
+        return lingerMs;
+    }
+
+    public void setLingerMs(int lingerMs) {
+        this.lingerMs = lingerMs;
+    }
+
+    public String getAcks() {
+        return acks;
+    }
+
+    public void setAcks(String acks) {
+        this.acks = acks;
+    }
+
+    public int getRetries() {
+        return retries;
+    }
+
+    public void setRetries(int retries) {
+        this.retries = retries;
+    }
+
+    public int getMaxRequestSize() {
+        return maxRequestSize;
+    }
+
+    public void setMaxRequestSize(int maxRequestSize) {
+        this.maxRequestSize = maxRequestSize;
+    }
 }

+ 8 - 6
dbsyncer-connector/src/main/java/org/dbsyncer/connector/enums/KafkaFieldTypeEnum.java → dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/enums/KafkaFieldTypeEnum.java

@@ -1,7 +1,10 @@
-package org.dbsyncer.connector.enums;
+/**
+ * DBSyncer Copyright 2020-2023 All Rights Reserved.
+ */
+package org.dbsyncer.connector.kafka.enums;
 
+import org.apache.kafka.common.KafkaException;
 import org.dbsyncer.common.util.StringUtil;
-import org.dbsyncer.connector.ConnectorException;
 
 import java.sql.Date;
 import java.sql.Time;
@@ -13,8 +16,7 @@ import java.sql.Types;
  *
  * @author AE86
  * @version 1.0.0
- * @date 2021/12/08 21:13
- * @date 2021/12/17 0:02
+ * @date 2021/11/23 23:13
  */
 public enum KafkaFieldTypeEnum {
 
@@ -47,13 +49,13 @@ public enum KafkaFieldTypeEnum {
         this.type = type;
     }
 
-    public static Class getType(String code) throws ConnectorException {
+    public static Class getType(String code) throws KafkaException {
         for (KafkaFieldTypeEnum e : KafkaFieldTypeEnum.values()) {
             if (StringUtil.equals(e.getCode(), code)) {
                 return e.getClazz();
             }
         }
-        throw new ConnectorException(String.format("Unsupported code: %s", code));
+        throw new KafkaException(String.format("Unsupported code: %s", code));
     }
 
     public String getCode() {

+ 6 - 3
dbsyncer-connector/src/main/java/org/dbsyncer/connector/kafka/serialization/JsonToMapDeserializer.java → dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/serialization/JsonToMapDeserializer.java

@@ -1,3 +1,6 @@
+/**
+ * DBSyncer Copyright 2020-2023 All Rights Reserved.
+ */
 package org.dbsyncer.connector.kafka.serialization;
 
 import org.apache.kafka.common.errors.SerializationException;
@@ -9,9 +12,9 @@ import java.util.LinkedHashMap;
 import java.util.Map;
 
 /**
- * @author AE86
- * @version 1.0.0
- * @date 2021/12/16 23:09
+ * @Author AE86
+ * @Version 1.0.0
+ * @Date 2021-12-16 23:09
  */
 public class JsonToMapDeserializer implements Deserializer<Map> {
     private String encoding = "UTF8";

+ 6 - 3
dbsyncer-connector/src/main/java/org/dbsyncer/connector/kafka/serialization/MapToJsonSerializer.java → dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/serialization/MapToJsonSerializer.java

@@ -1,3 +1,6 @@
+/**
+ * DBSyncer Copyright 2020-2023 All Rights Reserved.
+ */
 package org.dbsyncer.connector.kafka.serialization;
 
 import org.apache.kafka.common.errors.SerializationException;
@@ -8,9 +11,9 @@ import java.io.UnsupportedEncodingException;
 import java.util.Map;
 
 /**
- * @author AE86
- * @version 1.0.0
- * @date 2021/12/16 23:09
+ * @Author AE86
+ * @Version 1.0.0
+ * @Date 2021-12-16 23:09
  */
 public class MapToJsonSerializer implements Serializer<Map> {
     private String encoding = "UTF8";

+ 12 - 2
dbsyncer-connector/src/main/java/org/dbsyncer/connector/util/KafkaUtil.java → dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/util/KafkaUtil.java

@@ -1,12 +1,22 @@
-package org.dbsyncer.connector.util;
+/**
+ * DBSyncer Copyright 2020-2023 All Rights Reserved.
+ */
+package org.dbsyncer.connector.kafka.util;
 
 import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.clients.producer.KafkaProducer;
-import org.dbsyncer.connector.config.KafkaConfig;
 import org.dbsyncer.connector.kafka.KafkaClient;
+import org.dbsyncer.connector.kafka.config.KafkaConfig;
 
 import java.util.Properties;
 
+/**
+ * Kafka连接器工具类
+ *
+ * @Author AE86
+ * @Version 1.0.0
+ * @Date 2021-12-16 23:09
+ */
 public abstract class KafkaUtil {
 
     public static KafkaClient getConnection(KafkaConfig config) {

+ 3 - 3
dbsyncer-biz/src/main/java/org/dbsyncer/biz/checker/impl/connector/KafkaConfigValidator.java → dbsyncer-connector/dbsyncer-connector-kafka/src/main/java/org/dbsyncer/connector/kafka/validator/KafkaConfigValidator.java

@@ -1,11 +1,11 @@
 /**
  * DBSyncer Copyright 2020-2023 All Rights Reserved.
  */
-package org.dbsyncer.biz.checker.impl.connector;
+package org.dbsyncer.connector.kafka.validator;
 
-import org.dbsyncer.sdk.connector.ConfigValidator;
 import org.dbsyncer.common.util.NumberUtil;
-import org.dbsyncer.connector.config.KafkaConfig;
+import org.dbsyncer.connector.kafka.config.KafkaConfig;
+import org.dbsyncer.sdk.connector.ConfigValidator;
 import org.springframework.stereotype.Component;
 import org.springframework.util.Assert;
 

+ 1 - 0
dbsyncer-connector/dbsyncer-connector-kafka/src/main/resources/META-INF/services/org.dbsyncer.sdk.spi.ConnectorService

@@ -0,0 +1 @@
+org.dbsyncer.connector.kafka.KafkaConnector

+ 7 - 3
dbsyncer-connector/src/main/test/KafkaClientTest.java → dbsyncer-connector/dbsyncer-connector-kafka/src/test/java/KafkaClientTest.java

@@ -1,12 +1,16 @@
+/**
+ * DBSyncer Copyright 2020-2023 All Rights Reserved.
+ */
+
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.dbsyncer.common.util.JsonUtil;
-import org.dbsyncer.connector.config.KafkaConfig;
-import org.dbsyncer.connector.enums.KafkaFieldTypeEnum;
 import org.dbsyncer.connector.kafka.KafkaClient;
+import org.dbsyncer.connector.kafka.config.KafkaConfig;
+import org.dbsyncer.connector.kafka.enums.KafkaFieldTypeEnum;
 import org.dbsyncer.connector.kafka.serialization.JsonToMapDeserializer;
 import org.dbsyncer.connector.kafka.serialization.MapToJsonSerializer;
-import org.dbsyncer.connector.util.KafkaUtil;
+import org.dbsyncer.connector.kafka.util.KafkaUtil;
 import org.dbsyncer.sdk.model.Field;
 import org.junit.After;
 import org.junit.Before;

+ 9 - 7
dbsyncer-connector/pom.xml

@@ -13,6 +13,7 @@
     <modules>
         <module>dbsyncer-connector-elasticsearch</module>
         <module>dbsyncer-connector-file</module>
+        <module>dbsyncer-connector-kafka</module>
     </modules>
 
     <dependencies>
@@ -30,11 +31,18 @@
             <version>${project.parent.version}</version>
         </dependency> -->
 
-        <!-- dbsyncer-connector-file -->
+        <!-- dbsyncer-connector-file
         <dependency>
             <groupId>org.ghi</groupId>
             <artifactId>dbsyncer-connector-file</artifactId>
             <version>${project.parent.version}</version>
+        </dependency> -->
+
+        <!-- dbsyncer-connector-kafka -->
+        <dependency>
+            <groupId>org.ghi</groupId>
+            <artifactId>dbsyncer-connector-kafka</artifactId>
+            <version>${project.parent.version}</version>
         </dependency>
 
         <!-- Spring-JDBC -->
@@ -88,12 +96,6 @@
             <artifactId>elasticsearch-rest-high-level-client</artifactId>
         </dependency>
 
-        <!-- kafka -->
-        <dependency>
-            <groupId>org.apache.kafka</groupId>
-            <artifactId>kafka-clients</artifactId>
-        </dependency>
-
         <dependency>
             <groupId>com.github.jsqlparser</groupId>
             <artifactId>jsqlparser</artifactId>

+ 0 - 50
dbsyncer-connector/src/main/java/org/dbsyncer/connector/config/FileConfig.java

@@ -1,50 +0,0 @@
-package org.dbsyncer.connector.config;
-
-import org.dbsyncer.sdk.model.ConnectorConfig;
-
-/**
- * @author AE86
- * @version 1.0.0
- * @date 2022/5/5 23:19
- */
-public class FileConfig extends ConnectorConfig {
-
-    /**
-     * 文件目录
-     */
-    private String fileDir;
-
-    /**
-     * 分隔符
-     */
-    private char separator;
-
-    /**
-     * 文件描述信息
-     */
-    private String schema;
-
-    public String getFileDir() {
-        return fileDir;
-    }
-
-    public void setFileDir(String fileDir) {
-        this.fileDir = fileDir;
-    }
-
-    public char getSeparator() {
-        return separator;
-    }
-
-    public void setSeparator(char separator) {
-        this.separator = separator;
-    }
-
-    public String getSchema() {
-        return schema;
-    }
-
-    public void setSchema(String schema) {
-        this.schema = schema;
-    }
-}