// ParserFactory.java
  1. package org.dbsyncer.parser;
  2. import org.dbsyncer.cache.CacheService;
  3. import org.dbsyncer.common.event.RowChangedEvent;
  4. import org.dbsyncer.common.model.Result;
  5. import org.dbsyncer.common.util.CollectionUtils;
  6. import org.dbsyncer.common.util.JsonUtil;
  7. import org.dbsyncer.common.util.StringUtil;
  8. import org.dbsyncer.connector.ConnectorFactory;
  9. import org.dbsyncer.connector.ConnectorMapper;
  10. import org.dbsyncer.connector.config.CommandConfig;
  11. import org.dbsyncer.connector.config.ConnectorConfig;
  12. import org.dbsyncer.connector.config.ReaderConfig;
  13. import org.dbsyncer.connector.config.WriterBatchConfig;
  14. import org.dbsyncer.connector.constant.ConnectorConstant;
  15. import org.dbsyncer.connector.enums.ConnectorEnum;
  16. import org.dbsyncer.connector.enums.FilterEnum;
  17. import org.dbsyncer.connector.enums.OperationEnum;
  18. import org.dbsyncer.connector.model.Field;
  19. import org.dbsyncer.connector.model.MetaInfo;
  20. import org.dbsyncer.connector.model.Table;
  21. import org.dbsyncer.listener.enums.QuartzFilterEnum;
  22. import org.dbsyncer.parser.enums.ConvertEnum;
  23. import org.dbsyncer.parser.enums.ParserEnum;
  24. import org.dbsyncer.parser.event.FullRefreshEvent;
  25. import org.dbsyncer.parser.logger.LogService;
  26. import org.dbsyncer.parser.logger.LogType;
  27. import org.dbsyncer.parser.model.*;
  28. import org.dbsyncer.parser.strategy.FlushStrategy;
  29. import org.dbsyncer.parser.strategy.ParserStrategy;
  30. import org.dbsyncer.parser.util.ConvertUtil;
  31. import org.dbsyncer.parser.util.PickerUtil;
  32. import org.dbsyncer.plugin.PluginFactory;
  33. import org.dbsyncer.storage.enums.StorageDataStatusEnum;
  34. import org.json.JSONException;
  35. import org.json.JSONObject;
  36. import org.slf4j.Logger;
  37. import org.slf4j.LoggerFactory;
  38. import org.springframework.beans.factory.annotation.Autowired;
  39. import org.springframework.beans.factory.annotation.Qualifier;
  40. import org.springframework.context.ApplicationContext;
  41. import org.springframework.stereotype.Component;
  42. import org.springframework.util.Assert;
  43. import java.time.Instant;
  44. import java.util.ArrayList;
  45. import java.util.Arrays;
  46. import java.util.List;
  47. import java.util.Map;
  48. import java.util.concurrent.CountDownLatch;
  49. import java.util.concurrent.Executor;
  50. /**
  51. * @author AE86
  52. * @version 1.0.0
  53. * @date 2019/9/29 22:38
  54. */
  55. @Component
  56. public class ParserFactory implements Parser {
  57. private final Logger logger = LoggerFactory.getLogger(getClass());
  58. @Autowired
  59. private ConnectorFactory connectorFactory;
  60. @Autowired
  61. private PluginFactory pluginFactory;
  62. @Autowired
  63. private CacheService cacheService;
  64. @Autowired
  65. private LogService logService;
  66. @Autowired
  67. private FlushStrategy flushStrategy;
  68. @Autowired
  69. @Qualifier("taskExecutor")
  70. private Executor taskExecutor;
  71. @Autowired
  72. private ApplicationContext applicationContext;
  73. @Autowired
  74. private ParserStrategy parserStrategy;
  75. @Override
  76. public ConnectorMapper connect(ConnectorConfig config) {
  77. return connectorFactory.connect(config);
  78. }
  79. @Override
  80. public boolean refreshConnectorConfig(ConnectorConfig config) {
  81. return connectorFactory.refresh(config);
  82. }
  83. @Override
  84. public boolean isAliveConnectorConfig(ConnectorConfig config) {
  85. boolean alive = false;
  86. try {
  87. alive = connectorFactory.isAlive(config);
  88. } catch (Exception e) {
  89. LogType.ConnectorLog logType = LogType.ConnectorLog.FAILED;
  90. logService.log(logType, "%s%s", logType.getName(), e.getMessage());
  91. }
  92. // 断线重连
  93. if (!alive) {
  94. try {
  95. alive = connectorFactory.refresh(config);
  96. } catch (Exception e) {
  97. logger.error(e.getMessage());
  98. }
  99. if (alive) {
  100. logger.info(LogType.ConnectorLog.RECONNECT_SUCCESS.getMessage());
  101. }
  102. }
  103. return alive;
  104. }
  105. @Override
  106. public List<Table> getTable(ConnectorMapper config) {
  107. return connectorFactory.getTable(config);
  108. }
  109. @Override
  110. public MetaInfo getMetaInfo(String connectorId, String tableName) {
  111. Connector connector = getConnector(connectorId);
  112. ConnectorMapper connectorMapper = connectorFactory.connect(connector.getConfig());
  113. MetaInfo metaInfo = connectorFactory.getMetaInfo(connectorMapper, tableName);
  114. if (!CollectionUtils.isEmpty(connector.getTable())) {
  115. for (Table t : connector.getTable()) {
  116. if (t.getName().equals(tableName)) {
  117. metaInfo.setTableType(t.getType());
  118. break;
  119. }
  120. }
  121. }
  122. return metaInfo;
  123. }
  124. @Override
  125. public Map<String, String> getCommand(Mapping mapping, TableGroup tableGroup) {
  126. ConnectorConfig sConnConfig = getConnectorConfig(mapping.getSourceConnectorId());
  127. ConnectorConfig tConnConfig = getConnectorConfig(mapping.getTargetConnectorId());
  128. Table sourceTable = tableGroup.getSourceTable();
  129. Table targetTable = tableGroup.getTargetTable();
  130. Table sTable = new Table(sourceTable.getName(), sourceTable.getType(), new ArrayList<>());
  131. Table tTable = new Table(targetTable.getName(), targetTable.getType(), new ArrayList<>());
  132. List<FieldMapping> fieldMapping = tableGroup.getFieldMapping();
  133. if (!CollectionUtils.isEmpty(fieldMapping)) {
  134. fieldMapping.forEach(m -> {
  135. if (null != m.getSource()) {
  136. sTable.getColumn().add(m.getSource());
  137. }
  138. if (null != m.getTarget()) {
  139. tTable.getColumn().add(m.getTarget());
  140. }
  141. });
  142. }
  143. final CommandConfig sourceConfig = new CommandConfig(sConnConfig.getConnectorType(), sTable, sourceTable, sConnConfig, tableGroup.getFilter());
  144. final CommandConfig targetConfig = new CommandConfig(tConnConfig.getConnectorType(), tTable, targetTable, tConnConfig);
  145. // 获取连接器同步参数
  146. Map<String, String> command = connectorFactory.getCommand(sourceConfig, targetConfig);
  147. return command;
  148. }
  149. @Override
  150. public long getCount(String connectorId, Map<String, String> command) {
  151. ConnectorMapper connectorMapper = connectorFactory.connect(getConnectorConfig(connectorId));
  152. return connectorFactory.getCount(connectorMapper, command);
  153. }
  154. @Override
  155. public Connector parseConnector(String json) {
  156. try {
  157. JSONObject conn = new JSONObject(json);
  158. JSONObject config = (JSONObject) conn.remove("config");
  159. Connector connector = JsonUtil.jsonToObj(conn.toString(), Connector.class);
  160. Assert.notNull(connector, "Connector can not be null.");
  161. String connectorType = config.getString("connectorType");
  162. Class<?> configClass = ConnectorEnum.getConfigClass(connectorType);
  163. ConnectorConfig obj = (ConnectorConfig) JsonUtil.jsonToObj(config.toString(), configClass);
  164. connector.setConfig(obj);
  165. return connector;
  166. } catch (JSONException e) {
  167. logger.error(e.getMessage());
  168. throw new ParserException(e.getMessage());
  169. }
  170. }
  171. @Override
  172. public <T> T parseObject(String json, Class<T> clazz) {
  173. T t = JsonUtil.jsonToObj(json, clazz);
  174. return t;
  175. }
  176. @Override
  177. public List<ConnectorEnum> getConnectorEnumAll() {
  178. return Arrays.asList(ConnectorEnum.values());
  179. }
  180. @Override
  181. public List<OperationEnum> getOperationEnumAll() {
  182. return Arrays.asList(OperationEnum.values());
  183. }
  184. @Override
  185. public List<QuartzFilterEnum> getQuartzFilterEnumAll() {
  186. return Arrays.asList(QuartzFilterEnum.values());
  187. }
  188. @Override
  189. public List<FilterEnum> getFilterEnumAll() {
  190. return Arrays.asList(FilterEnum.values());
  191. }
  192. @Override
  193. public List<ConvertEnum> getConvertEnumAll() {
  194. return Arrays.asList(ConvertEnum.values());
  195. }
  196. @Override
  197. public List<StorageDataStatusEnum> getStorageDataStatusEnumAll() {
  198. return Arrays.asList(StorageDataStatusEnum.values());
  199. }
  200. @Override
  201. public void execute(Task task, Mapping mapping, TableGroup tableGroup) {
  202. final String metaId = task.getId();
  203. final String sourceConnectorId = mapping.getSourceConnectorId();
  204. final String targetConnectorId = mapping.getTargetConnectorId();
  205. ConnectorConfig sConfig = getConnectorConfig(sourceConnectorId);
  206. Assert.notNull(sConfig, "数据源配置不能为空.");
  207. ConnectorConfig tConfig = getConnectorConfig(targetConnectorId);
  208. Assert.notNull(tConfig, "目标源配置不能为空.");
  209. TableGroup group = PickerUtil.mergeTableGroupConfig(mapping, tableGroup);
  210. Map<String, String> command = group.getCommand();
  211. Assert.notEmpty(command, "执行命令不能为空.");
  212. List<FieldMapping> fieldMapping = group.getFieldMapping();
  213. String sTableName = group.getSourceTable().getName();
  214. String tTableName = group.getTargetTable().getName();
  215. Assert.notEmpty(fieldMapping, String.format("数据源表[%s]同步到目标源表[%s], 映射关系不能为空.", sTableName, tTableName));
  216. // 获取同步字段
  217. Picker picker = new Picker(fieldMapping);
  218. // 检查分页参数
  219. Map<String, String> params = getMeta(metaId).getMap();
  220. params.putIfAbsent(ParserEnum.PAGE_INDEX.getCode(), ParserEnum.PAGE_INDEX.getDefaultValue());
  221. int pageSize = mapping.getReadNum();
  222. int batchSize = mapping.getBatchNum();
  223. ConnectorMapper sConnectorMapper = connectorFactory.connect(sConfig);
  224. ConnectorMapper tConnectorMapper = connectorFactory.connect(tConfig);
  225. for (; ; ) {
  226. if (!task.isRunning()) {
  227. logger.warn("任务被中止:{}", metaId);
  228. break;
  229. }
  230. // 1、获取数据源数据
  231. int pageIndex = Integer.parseInt(params.get(ParserEnum.PAGE_INDEX.getCode()));
  232. Result reader = connectorFactory.reader(sConnectorMapper, new ReaderConfig(command, new ArrayList<>(), pageIndex, pageSize));
  233. List<Map> data = reader.getSuccessData();
  234. if (CollectionUtils.isEmpty(data)) {
  235. params.clear();
  236. logger.info("完成全量同步任务:{}, [{}] >> [{}]", metaId, sTableName, tTableName);
  237. break;
  238. }
  239. // 2、映射字段
  240. List<Map> target = picker.pickData(data);
  241. // 3、参数转换
  242. ConvertUtil.convert(group.getConvert(), target);
  243. // 4、插件转换
  244. pluginFactory.convert(group.getPlugin(), data, target);
  245. // 5、写入目标源
  246. Result writer = writeBatch(new BatchWriter(tConnectorMapper, command, sTableName, ConnectorConstant.OPERTION_INSERT, picker.getTargetFields(), target, batchSize));
  247. // 6、更新结果
  248. flush(task, writer);
  249. // 7、判断尾页
  250. if (data.size() < pageSize) {
  251. params.clear();
  252. logger.info("完成全量同步任务:{}, [{}] >> [{}]", metaId, sTableName, tTableName);
  253. break;
  254. }
  255. // 8、更新分页数
  256. params.put(ParserEnum.PAGE_INDEX.getCode(), String.valueOf(++pageIndex));
  257. }
  258. }
  259. @Override
  260. public void execute(Mapping mapping, TableGroup tableGroup, RowChangedEvent event) {
  261. logger.debug("Table[{}] {}, data:{}", event.getSourceTableName(), event.getEvent(), event.getDataMap());
  262. // 1、获取映射字段
  263. final Picker picker = new Picker(tableGroup.getFieldMapping());
  264. final Map target = picker.pickData(event.getDataMap());
  265. // 2、参数转换
  266. ConvertUtil.convert(tableGroup.getConvert(), target);
  267. // 3、插件转换
  268. pluginFactory.convert(tableGroup.getPlugin(), event.getEvent(), event.getDataMap(), target);
  269. // 4、处理数据
  270. parserStrategy.execute(tableGroup.getId(), event.getEvent(), target);
  271. }
  272. /**
  273. * 批量写入
  274. *
  275. * @param batchWriter
  276. * @return
  277. */
  278. @Override
  279. public Result writeBatch(BatchWriter batchWriter) {
  280. List<Map> dataList = batchWriter.getDataList();
  281. int batchSize = batchWriter.getBatchSize();
  282. String tableName = batchWriter.getTableName();
  283. String event = batchWriter.getEvent();
  284. Map<String, String> command = batchWriter.getCommand();
  285. List<Field> fields = batchWriter.getFields();
  286. // 总数
  287. int total = dataList.size();
  288. // 单次任务
  289. if (total <= batchSize) {
  290. return connectorFactory.writer(batchWriter.getConnectorMapper(), new WriterBatchConfig(tableName, event, command, fields, dataList));
  291. }
  292. // 批量任务, 拆分
  293. int taskSize = total % batchSize == 0 ? total / batchSize : total / batchSize + 1;
  294. final Result result = new Result();
  295. final CountDownLatch latch = new CountDownLatch(taskSize);
  296. int fromIndex = 0;
  297. int toIndex = batchSize;
  298. for (int i = 0; i < taskSize; i++) {
  299. final List<Map> data;
  300. if (toIndex > total) {
  301. toIndex = fromIndex + (total % batchSize);
  302. data = dataList.subList(fromIndex, toIndex);
  303. } else {
  304. data = dataList.subList(fromIndex, toIndex);
  305. fromIndex += batchSize;
  306. toIndex += batchSize;
  307. }
  308. taskExecutor.execute(() -> {
  309. try {
  310. Result w = connectorFactory.writer(batchWriter.getConnectorMapper(), new WriterBatchConfig(tableName, event, command, fields, data));
  311. result.addSuccessData(w.getSuccessData());
  312. result.addFailData(w.getFailData());
  313. result.getError().append(w.getError());
  314. } catch (Exception e) {
  315. logger.error(e.getMessage());
  316. } finally {
  317. latch.countDown();
  318. }
  319. });
  320. }
  321. try {
  322. latch.await();
  323. } catch (InterruptedException e) {
  324. logger.error(e.getMessage());
  325. }
  326. return result;
  327. }
  328. /**
  329. * 更新缓存
  330. *
  331. * @param task
  332. * @param writer
  333. */
  334. private void flush(Task task, Result writer) {
  335. flushStrategy.flushFullData(task.getId(), writer, ConnectorConstant.OPERTION_INSERT);
  336. // 发布刷新事件给FullExtractor
  337. task.setEndTime(Instant.now().toEpochMilli());
  338. applicationContext.publishEvent(new FullRefreshEvent(applicationContext, task));
  339. }
  340. /**
  341. * 获取Meta(注: 没有bean拷贝, 便于直接更新缓存)
  342. *
  343. * @param metaId
  344. * @return
  345. */
  346. private Meta getMeta(String metaId) {
  347. Assert.hasText(metaId, "Meta id can not be empty.");
  348. Meta meta = cacheService.get(metaId, Meta.class);
  349. Assert.notNull(meta, "Meta can not be null.");
  350. return meta;
  351. }
  352. /**
  353. * 获取连接器
  354. *
  355. * @param connectorId
  356. * @return
  357. */
  358. private Connector getConnector(String connectorId) {
  359. Assert.hasText(connectorId, "Connector id can not be empty.");
  360. Connector conn = cacheService.get(connectorId, Connector.class);
  361. Assert.notNull(conn, "Connector can not be null.");
  362. return conn;
  363. }
  364. /**
  365. * 获取连接配置
  366. *
  367. * @param connectorId
  368. * @return
  369. */
  370. private ConnectorConfig getConnectorConfig(String connectorId) {
  371. return getConnector(connectorId).getConfig();
  372. }
  373. }