rewer 6 years ago
Commit
0c5f5c128f
14 files changed, 3253 additions and 3243 deletions
  1. + 156 - 156
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/YmlConfigBinder.java
  2. + 52 - 52
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/OriginCapablePropertyValue.java
  3. + 117 - 118
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/ByteArrayResource.java
  4. + 422 - 422
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/SchemaItem.java
  5. + 526 - 526
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESTemplate.java
  6. + 397 - 392
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseEtlService.java
  7. + 216 - 216
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterLoader.java
  8. + 246 - 248
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/remote/DbRemoteConfigLoader.java
  9. + 51 - 53
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/ConfigLoader.java
  10. + 169 - 170
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/monitor/RdbConfigMonitor.java
  11. + 506 - 499
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java
  12. + 325 - 326
      client/src/main/java/com/alibaba/otter/canal/client/kafka/KafkaCanalConnector.java
  13. + 35 - 32
      client/src/main/java/com/alibaba/otter/canal/client/kafka/protocol/KafkaFlatMessage.java
  14. + 35 - 33
      client/src/main/java/com/alibaba/otter/canal/client/kafka/protocol/KafkaMessage.java

+ 156 - 156
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/YmlConfigBinder.java

@@ -1,156 +1,156 @@
-package com.alibaba.otter.canal.client.adapter.config;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Properties;
-
-import org.springframework.util.PropertyPlaceholderHelper;
-import org.springframework.util.StringUtils;
-
-import com.alibaba.otter.canal.client.adapter.config.bind.PropertiesConfigurationFactory;
-import com.alibaba.otter.canal.client.adapter.config.common.*;
-
-/**
- * Binds YAML content to a target object, following Spring's YAML binding rules.
- *
- * @author reweerma 2019-2-1 9:14:02 AM
- * @version 1.0.0
- */
-public class YmlConfigBinder {
-
-    /**
-     * Binds the given content to the target object.
-     *
-     * @param content YAML content
-     * @param clazz target object type
-     * @return the bound object
-     */
-    public static <T> T bindYmlToObj(String content, Class<T> clazz) {
-        return bindYmlToObj(null, content, clazz, null);
-    }
-
-    /**
-     * Binds the given content to the target object, decoding it with the given charset.
-     *
-     * @param content YAML content
-     * @param clazz target object type
-     * @param charset charset of the YAML content
-     * @return the bound object
-     */
-    public static <T> T bindYmlToObj(String content, Class<T> clazz, String charset) {
-        return bindYmlToObj(null, content, clazz, charset);
-    }
-
-    /**
-     * Binds the part of the content under the given prefix to the target object.
-     *
-     * @param prefix prefix to bind from
-     * @param content YAML content
-     * @param clazz target object type
-     * @return the bound object
-     */
-    public static <T> T bindYmlToObj(String prefix, String content, Class<T> clazz) {
-        return bindYmlToObj(prefix, content, clazz, null);
-    }
-
-    /**
-     * Binds the prefixed part of the content to the target object, decoding it with the given charset.
-     *
-     * @param prefix prefix to bind from
-     * @param content YAML content
-     * @param clazz target object type
-     * @param charset charset of the YAML content
-     * @return the bound object
-     */
-    public static <T> T bindYmlToObj(String prefix, String content, Class<T> clazz, String charset) {
-        return bindYmlToObj(prefix, content, clazz, charset, null);
-    }
-
-    /**
-     * Binds the prefixed part of the content to the target object, replacing
-     * placeholders with properties from the environment. For example, if the
-     * content contains zkServers: ${zookeeper.servers} and envProperties contains
-     * zookeeper.servers: 192.168.0.1:2181,192.168.0.1:2181,192.168.0.1:2181, then
-     * zkServers becomes zkServers: 192.168.0.1:2181,192.168.0.1:2181,192.168.0.1:2181.
-     * Note: if the zkServers property of the bound class is a List<String>, the
-     * value is automatically mapped to a List.
-     *
-     * @param prefix prefix to bind from
-     * @param content YAML content
-     * @param clazz target object type
-     * @param charset charset of the YAML content
-     * @return the bound object
-     */
-    public static <T> T bindYmlToObj(String prefix, String content, Class<T> clazz, String charset,
-                                     Properties baseProperties) {
-        try {
-            byte[] contentBytes;
-            if (charset == null) {
-                contentBytes = content.getBytes();
-            } else {
-                contentBytes = content.getBytes(charset);
-            }
-            YamlPropertySourceLoader propertySourceLoader = new YamlPropertySourceLoader();
-            Resource configResource = new ByteArrayResource(contentBytes);
-            PropertySource propertySource = propertySourceLoader.load("manualBindConfig", configResource, null);
-
-            if (propertySource == null) {
-                return null;
-            }
-
-            Properties properties = new Properties();
-            Map<String, Object> propertiesRes = new LinkedHashMap<>();
-            if (!StringUtils.isEmpty(prefix) && !prefix.endsWith(".")) {
-                prefix = prefix + ".";
-            }
-
-            properties.putAll((Map) propertySource.getSource());
-
-            if (baseProperties != null) {
-                baseProperties.putAll(properties);
-                properties = baseProperties;
-            }
-
-            for (Object o : ((Map) propertySource.getSource()).entrySet()) {
-                Map.Entry entry = (Map.Entry) o;
-                String key = (String) entry.getKey();
-                Object value = entry.getValue();
-
-                if (prefix != null) {
-                    if (key != null && key.startsWith(prefix)) {
-                        key = key.substring(prefix.length());
-                    } else {
-                        continue;
-                    }
-                }
-
-                if (value != null && value.toString().contains("${")) {
-                    PropertyPlaceholderHelper propertyPlaceholderHelper = new PropertyPlaceholderHelper("${", "}");
-                    value = propertyPlaceholderHelper.replacePlaceholders(value.toString(), properties);
-                }
-
-                propertiesRes.put(key, value);
-            }
-
-            if (propertiesRes.isEmpty()) {
-                return null;
-            }
-
-            propertySource = new MapPropertySource(propertySource.getName(), propertiesRes);
-
-            T target = clazz.newInstance();
-
-            MutablePropertySources propertySources = new MutablePropertySources();
-            propertySources.addFirst(propertySource);
-
-            PropertiesConfigurationFactory<Object> factory = new PropertiesConfigurationFactory<Object>(target);
-            factory.setPropertySources(propertySources);
-            factory.setIgnoreInvalidFields(true);
-            factory.setIgnoreUnknownFields(true);
-
-            factory.bindPropertiesToTarget();
-
-            return target;
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-}
+package com.alibaba.otter.canal.client.adapter.config;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import org.springframework.util.PropertyPlaceholderHelper;
+import org.springframework.util.StringUtils;
+
+import com.alibaba.otter.canal.client.adapter.config.bind.PropertiesConfigurationFactory;
+import com.alibaba.otter.canal.client.adapter.config.common.*;
+
+/**
+ * Binds YAML content to a target object, following Spring's YAML binding rules.
+ *
+ * @author reweerma 2019-2-1 9:14:02 AM
+ * @version 1.0.0
+ */
+public class YmlConfigBinder {
+
+    /**
+     * Binds the given content to the target object.
+     *
+     * @param content YAML content
+     * @param clazz target object type
+     * @return the bound object
+     */
+    public static <T> T bindYmlToObj(String content, Class<T> clazz) {
+        return bindYmlToObj(null, content, clazz, null);
+    }
+
+    /**
+     * Binds the given content to the target object, decoding it with the given charset.
+     *
+     * @param content YAML content
+     * @param clazz target object type
+     * @param charset charset of the YAML content
+     * @return the bound object
+     */
+    public static <T> T bindYmlToObj(String content, Class<T> clazz, String charset) {
+        return bindYmlToObj(null, content, clazz, charset);
+    }
+
+    /**
+     * Binds the part of the content under the given prefix to the target object.
+     *
+     * @param prefix prefix to bind from
+     * @param content YAML content
+     * @param clazz target object type
+     * @return the bound object
+     */
+    public static <T> T bindYmlToObj(String prefix, String content, Class<T> clazz) {
+        return bindYmlToObj(prefix, content, clazz, null);
+    }
+
+    /**
+     * Binds the prefixed part of the content to the target object, decoding it with the given charset.
+     *
+     * @param prefix prefix to bind from
+     * @param content YAML content
+     * @param clazz target object type
+     * @param charset charset of the YAML content
+     * @return the bound object
+     */
+    public static <T> T bindYmlToObj(String prefix, String content, Class<T> clazz, String charset) {
+        return bindYmlToObj(prefix, content, clazz, charset, null);
+    }
+
+    /**
+     * Binds the prefixed part of the content to the target object, replacing
+     * placeholders with properties from the environment. For example, if the
+     * content contains zkServers: ${zookeeper.servers} and envProperties contains
+     * zookeeper.servers: 192.168.0.1:2181,192.168.0.1:2181,192.168.0.1:2181, then
+     * zkServers becomes zkServers: 192.168.0.1:2181,192.168.0.1:2181,192.168.0.1:2181.
+     * Note: if the zkServers property of the bound class is a List<String>, the
+     * value is automatically mapped to a List.
+     *
+     * @param prefix prefix to bind from
+     * @param content YAML content
+     * @param clazz target object type
+     * @param charset charset of the YAML content
+     * @return the bound object
+     */
+    public static <T> T bindYmlToObj(String prefix, String content, Class<T> clazz, String charset,
+                                     Properties baseProperties) {
+        try {
+            byte[] contentBytes;
+            if (charset == null) {
+                contentBytes = content.getBytes();
+            } else {
+                contentBytes = content.getBytes(charset);
+            }
+            YamlPropertySourceLoader propertySourceLoader = new YamlPropertySourceLoader();
+            Resource configResource = new ByteArrayResource(contentBytes);
+            PropertySource<?> propertySource = propertySourceLoader.load("manualBindConfig", configResource, null);
+
+            if (propertySource == null) {
+                return null;
+            }
+
+            Properties properties = new Properties();
+            Map<String, Object> propertiesRes = new LinkedHashMap<>();
+            if (!StringUtils.isEmpty(prefix) && !prefix.endsWith(".")) {
+                prefix = prefix + ".";
+            }
+
+            properties.putAll((Map<?, ?>) propertySource.getSource());
+
+            if (baseProperties != null) {
+                baseProperties.putAll(properties);
+                properties = baseProperties;
+            }
+
+            for (Object o : ((Map<?, ?>) propertySource.getSource()).entrySet()) {
+                Map.Entry<?, ?> entry = (Map.Entry<?, ?>) o;
+                String key = (String) entry.getKey();
+                Object value = entry.getValue();
+
+                if (prefix != null) {
+                    if (key != null && key.startsWith(prefix)) {
+                        key = key.substring(prefix.length());
+                    } else {
+                        continue;
+                    }
+                }
+
+                if (value != null && value.toString().contains("${")) {
+                    PropertyPlaceholderHelper propertyPlaceholderHelper = new PropertyPlaceholderHelper("${", "}");
+                    value = propertyPlaceholderHelper.replacePlaceholders(value.toString(), properties);
+                }
+
+                propertiesRes.put(key, value);
+            }
+
+            if (propertiesRes.isEmpty()) {
+                return null;
+            }
+
+            propertySource = new MapPropertySource(propertySource.getName(), propertiesRes);
+
+            T target = clazz.newInstance();
+
+            MutablePropertySources propertySources = new MutablePropertySources();
+            propertySources.addFirst(propertySource);
+
+            PropertiesConfigurationFactory<Object> factory = new PropertiesConfigurationFactory<Object>(target);
+            factory.setPropertySources(propertySources);
+            factory.setIgnoreInvalidFields(true);
+            factory.setIgnoreUnknownFields(true);
+
+            factory.bindPropertiesToTarget();
+
+            return target;
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+}
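A minimal usage sketch of the binder above (illustrative only, not part of this commit; AdapterConfig is a hypothetical target bean, and the YAML content, prefix, and property names are assumptions chosen to mirror the placeholder example in the Javadoc):

import java.util.List;
import java.util.Properties;

import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder;

public class BindSketch {

    // Hypothetical target type; any bean with matching properties would do.
    public static class AdapterConfig {
        private List<String> zkServers;
        public List<String> getZkServers() { return zkServers; }
        public void setZkServers(List<String> zkServers) { this.zkServers = zkServers; }
    }

    public static void main(String[] args) {
        Properties env = new Properties();
        env.setProperty("zookeeper.servers", "192.168.0.1:2181,192.168.0.2:2181");

        String yml = "es:\n  zkServers: ${zookeeper.servers}\n";

        // Bind the "es" prefix: the placeholder is replaced from env, and the
        // comma-separated value is mapped onto List<String> by relaxed binding.
        AdapterConfig cfg = YmlConfigBinder.bindYmlToObj("es", yml, AdapterConfig.class, null, env);
        System.out.println(cfg.getZkServers());
    }
}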

+ 52 - 52
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/OriginCapablePropertyValue.java

@@ -1,52 +1,52 @@
-package com.alibaba.otter.canal.client.adapter.config.bind;
-
-import org.springframework.beans.PropertyValue;
-
-import com.alibaba.otter.canal.client.adapter.config.common.PropertySource;
-
-/**
- * A {@link PropertyValue} that can provide information about its origin.
- *
- * @author Andy Wilkinson
- */
-class OriginCapablePropertyValue extends PropertyValue {
-
-    private static final String  ATTRIBUTE_PROPERTY_ORIGIN = "propertyOrigin";
-
-    private final PropertyOrigin origin;
-
-    OriginCapablePropertyValue(PropertyValue propertyValue){
-        this(propertyValue.getName(),
-            propertyValue.getValue(),
-            (PropertyOrigin) propertyValue.getAttribute(ATTRIBUTE_PROPERTY_ORIGIN));
-    }
-
-    OriginCapablePropertyValue(String name, Object value, String originName, PropertySource<?> originSource){
-        this(name, value, new PropertyOrigin(originSource, originName));
-    }
-
-    OriginCapablePropertyValue(String name, Object value, PropertyOrigin origin){
-        super(name, value);
-        this.origin = origin;
-        setAttribute(ATTRIBUTE_PROPERTY_ORIGIN, origin);
-    }
-
-    public PropertyOrigin getOrigin() {
-        return this.origin;
-    }
-
-    @Override
-    public String toString() {
-        String name = (this.origin != null ? this.origin.getName() : this.getName());
-        String source = (this.origin.getSource() != null ? this.origin.getSource().getName() : "unknown");
-        return "'" + name + "' from '" + source + "'";
-    }
-
-    public static PropertyOrigin getOrigin(PropertyValue propertyValue) {
-        if (propertyValue instanceof OriginCapablePropertyValue) {
-            return ((OriginCapablePropertyValue) propertyValue).getOrigin();
-        }
-        return new OriginCapablePropertyValue(propertyValue).getOrigin();
-    }
-
-}
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import org.springframework.beans.PropertyValue;
+
+import com.alibaba.otter.canal.client.adapter.config.common.PropertySource;
+
+/**
+ * A {@link PropertyValue} that can provide information about its origin.
+ *
+ * @author Andy Wilkinson
+ */
+class OriginCapablePropertyValue extends PropertyValue {
+
+    private static final String  ATTRIBUTE_PROPERTY_ORIGIN = "propertyOrigin";
+
+    private final PropertyOrigin origin;
+
+    OriginCapablePropertyValue(PropertyValue propertyValue){
+        this(propertyValue.getName(),
+            propertyValue.getValue(),
+            (PropertyOrigin) propertyValue.getAttribute(ATTRIBUTE_PROPERTY_ORIGIN));
+    }
+
+    OriginCapablePropertyValue(String name, Object value, String originName, PropertySource<?> originSource){
+        this(name, value, new PropertyOrigin(originSource, originName));
+    }
+
+    OriginCapablePropertyValue(String name, Object value, PropertyOrigin origin){
+        super(name, value);
+        this.origin = origin;
+        setAttribute(ATTRIBUTE_PROPERTY_ORIGIN, origin);
+    }
+
+    public PropertyOrigin getOrigin() {
+        return this.origin;
+    }
+
+    @Override
+    public String toString() {
+        String name = (this.origin != null ? this.origin.getName() : this.getName());
+        String source = (this.origin.getSource() != null ? this.origin.getSource().getName() : "unknown");
+        return "'" + name + "' from '" + source + "'";
+    }
+
+    public static PropertyOrigin getOrigin(PropertyValue propertyValue) {
+        if (propertyValue instanceof OriginCapablePropertyValue) {
+            return ((OriginCapablePropertyValue) propertyValue).getOrigin();
+        }
+        return new OriginCapablePropertyValue(propertyValue).getOrigin();
+    }
+
+}
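A short sketch of how the origin metadata round-trips (illustrative only, not part of this commit). It has to live in the same com.alibaba.otter.canal.client.adapter.config.bind package, since the class is package-private; the property names and the null origin source are assumptions:

package com.alibaba.otter.canal.client.adapter.config.bind;

import org.springframework.beans.PropertyValue;

public class OriginSketch {

    public static void main(String[] args) {
        // The origin source is left null for brevity; normally this is the
        // PropertySource the value was read from.
        PropertyValue pv = new OriginCapablePropertyValue(
            "zkServers", "127.0.0.1:2181", "es.zkServers", null);

        // getOrigin() accepts any PropertyValue: the origin is carried in the
        // "propertyOrigin" attribute and is read back from that attribute when
        // the concrete type is not an OriginCapablePropertyValue.
        PropertyOrigin origin = OriginCapablePropertyValue.getOrigin(pv);
        System.out.println(origin.getName()); // es.zkServers
    }
}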

+ 117 - 118
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/ByteArrayResource.java

@@ -1,118 +1,117 @@
-package com.alibaba.otter.canal.client.adapter.config.common;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Arrays;
-
-import org.springframework.core.io.InputStreamResource;
-import org.springframework.core.io.InputStreamSource;
-import org.springframework.core.io.Resource;
-import org.springframework.util.Assert;
-
-/**
- * {@link Resource} implementation for a given byte array.
- * <p>
- * Creates a {@link ByteArrayInputStream} for the given byte array.
- * <p>
- * Useful for loading content from any given byte array, without having to
- * resort to a single-use {@link InputStreamResource}. Particularly useful for
- * creating mail attachments from local content, where JavaMail needs to be able
- * to read the stream multiple times.
- *
- * @author Juergen Hoeller
- * @author Sam Brannen
- * @see ByteArrayInputStream
- * @see InputStreamResource
- * @since 1.2.3
- */
-public class ByteArrayResource extends AbstractResource {
-
-    private final byte[] byteArray;
-
-    private final String description;
-
-    /**
-     * Create a new {@code ByteArrayResource}.
-     *
-     * @param byteArray the byte array to wrap
-     */
-    public ByteArrayResource(byte[] byteArray){
-        this(byteArray, "resource loaded from byte array");
-    }
-
-    /**
-     * Create a new {@code ByteArrayResource} with a description.
-     *
-     * @param byteArray the byte array to wrap
-     * @param description where the byte array comes from
-     */
-    public ByteArrayResource(byte[] byteArray, String description){
-        Assert.notNull(byteArray, "Byte array must not be null");
-        this.byteArray = byteArray;
-        this.description = (description != null ? description : "");
-    }
-
-    /**
-     * Return the underlying byte array.
-     */
-    public final byte[] getByteArray() {
-        return this.byteArray;
-    }
-
-    /**
-     * This implementation always returns {@code true}.
-     */
-    @Override
-    public boolean exists() {
-        return true;
-    }
-
-    /**
-     * This implementation returns the length of the underlying byte array.
-     */
-    @Override
-    public long contentLength() {
-        return this.byteArray.length;
-    }
-
-    /**
-     * This implementation returns a ByteArrayInputStream for the underlying byte
-     * array.
-     *
-     * @see ByteArrayInputStream
-     */
-    @Override
-    public InputStream getInputStream() throws IOException {
-        return new ByteArrayInputStream(this.byteArray);
-    }
-
-    /**
-     * This implementation returns a description that includes the passed-in
-     * {@code description}, if any.
-     */
-    @Override
-    public String getDescription() {
-        return "Byte array resource [" + this.description + "]";
-    }
-
-    /**
-     * This implementation compares the underlying byte array.
-     *
-     * @see Arrays#equals(byte[], byte[])
-     */
-    @Override
-    public boolean equals(Object obj) {
-        return (obj == this || (obj instanceof org.springframework.core.io.ByteArrayResource
-                                && Arrays.equals(((ByteArrayResource) obj).byteArray, this.byteArray)));
-    }
-
-    /**
-     * This implementation returns the hash code based on the underlying byte array.
-     */
-    @Override
-    public int hashCode() {
-        return (byte[].class.hashCode() * 29 * this.byteArray.length);
-    }
-
-}
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+
+import org.springframework.core.io.InputStreamResource;
+import org.springframework.core.io.Resource;
+import org.springframework.util.Assert;
+
+/**
+ * {@link Resource} implementation for a given byte array.
+ * <p>
+ * Creates a {@link ByteArrayInputStream} for the given byte array.
+ * <p>
+ * Useful for loading content from any given byte array, without having to
+ * resort to a single-use {@link InputStreamResource}. Particularly useful for
+ * creating mail attachments from local content, where JavaMail needs to be able
+ * to read the stream multiple times.
+ *
+ * @author Juergen Hoeller
+ * @author Sam Brannen
+ * @see ByteArrayInputStream
+ * @see InputStreamResource
+ * @since 1.2.3
+ */
+public class ByteArrayResource extends AbstractResource {
+
+    private final byte[] byteArray;
+
+    private final String description;
+
+    /**
+     * Create a new {@code ByteArrayResource}.
+     *
+     * @param byteArray the byte array to wrap
+     */
+    public ByteArrayResource(byte[] byteArray){
+        this(byteArray, "resource loaded from byte array");
+    }
+
+    /**
+     * Create a new {@code ByteArrayResource} with a description.
+     *
+     * @param byteArray the byte array to wrap
+     * @param description where the byte array comes from
+     */
+    public ByteArrayResource(byte[] byteArray, String description){
+        Assert.notNull(byteArray, "Byte array must not be null");
+        this.byteArray = byteArray;
+        this.description = (description != null ? description : "");
+    }
+
+    /**
+     * Return the underlying byte array.
+     */
+    public final byte[] getByteArray() {
+        return this.byteArray;
+    }
+
+    /**
+     * This implementation always returns {@code true}.
+     */
+    @Override
+    public boolean exists() {
+        return true;
+    }
+
+    /**
+     * This implementation returns the length of the underlying byte array.
+     */
+    @Override
+    public long contentLength() {
+        return this.byteArray.length;
+    }
+
+    /**
+     * This implementation returns a ByteArrayInputStream for the underlying byte
+     * array.
+     *
+     * @see ByteArrayInputStream
+     */
+    @Override
+    public InputStream getInputStream() throws IOException {
+        return new ByteArrayInputStream(this.byteArray);
+    }
+
+    /**
+     * This implementation returns a description that includes the passed-in
+     * {@code description}, if any.
+     */
+    @Override
+    public String getDescription() {
+        return "Byte array resource [" + this.description + "]";
+    }
+
+    /**
+     * This implementation compares the underlying byte array.
+     *
+     * @see Arrays#equals(byte[], byte[])
+     */
+    @Override
+    public boolean equals(Object obj) {
+        return (obj == this || (obj instanceof org.springframework.core.io.ByteArrayResource
+                                && Arrays.equals(((ByteArrayResource) obj).byteArray, this.byteArray)));
+    }
+
+    /**
+     * This implementation returns the hash code based on the underlying byte array.
+     */
+    @Override
+    public int hashCode() {
+        return (byte[].class.hashCode() * 29 * this.byteArray.length);
+    }
+
+}
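The point of this class, as its Javadoc notes, is re-readability: every getInputStream() call opens a fresh stream over the same byte array. A tiny sketch (illustrative only, not part of this commit):

import java.io.InputStream;

import com.alibaba.otter.canal.client.adapter.config.common.ByteArrayResource;

public class ResourceSketch {

    public static void main(String[] args) throws Exception {
        ByteArrayResource res = new ByteArrayResource("a: 1".getBytes("UTF-8"));
        // Unlike a single-use InputStreamResource, each call returns a new
        // ByteArrayInputStream positioned at the start of the array.
        try (InputStream first = res.getInputStream();
             InputStream second = res.getInputStream()) {
            System.out.println(first.read() == second.read()); // true, both read 'a'
        }
    }
}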

+ 422 - 422
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/SchemaItem.java

@@ -1,422 +1,422 @@
-package com.alibaba.otter.canal.client.adapter.es.config;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-
-import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
-
-/**
- * View of the ES mapping configuration
- *
- * @author rewerma 2018-11-01
- * @version 1.0.0
- */
-public class SchemaItem {
-
-    private Map<String, TableItem>                aliasTableItems = new LinkedHashMap<>(); // table name per alias
-    private Map<String, FieldItem>                selectFields    = new LinkedHashMap<>(); // select fields
-    private String                                sql;
-
-    private volatile Map<String, List<TableItem>> tableItemAliases;
-    private volatile Map<String, List<FieldItem>> columnFields;
-    private volatile Boolean                      allFieldsSimple;
-
-    public void init() {
-        this.getTableItemAliases();
-        this.getColumnFields();
-        this.isAllFieldsSimple();
-        aliasTableItems.values().forEach(tableItem -> {
-            tableItem.getRelationTableFields();
-            tableItem.getRelationSelectFieldItems();
-        });
-    }
-
-    public Map<String, TableItem> getAliasTableItems() {
-        return aliasTableItems;
-    }
-
-    public void setAliasTableItems(Map<String, TableItem> aliasTableItems) {
-        this.aliasTableItems = aliasTableItems;
-    }
-
-    public String getSql() {
-        return sql;
-    }
-
-    public void setSql(String sql) {
-        this.sql = sql;
-    }
-
-    public Map<String, FieldItem> getSelectFields() {
-        return selectFields;
-    }
-
-    public void setSelectFields(Map<String, FieldItem> selectFields) {
-        this.selectFields = selectFields;
-    }
-
-    public Map<String, List<TableItem>> getTableItemAliases() {
-        if (tableItemAliases == null) {
-            synchronized (SchemaItem.class) {
-                if (tableItemAliases == null) {
-                    tableItemAliases = new LinkedHashMap<>();
-                    aliasTableItems.forEach((alias, tableItem) -> {
-                        List<TableItem> aliases = tableItemAliases
-                            .computeIfAbsent(tableItem.getTableName().toLowerCase(), k -> new ArrayList<>());
-                        aliases.add(tableItem);
-                    });
-                }
-            }
-        }
-        return tableItemAliases;
-    }
-
-    public Map<String, List<FieldItem>> getColumnFields() {
-        if (columnFields == null) {
-            synchronized (SchemaItem.class) {
-                if (columnFields == null) {
-                    columnFields = new LinkedHashMap<>();
-                    getSelectFields()
-                        .forEach((fieldName, fieldItem) -> fieldItem.getColumnItems().forEach(columnItem -> {
-                            TableItem tableItem = getAliasTableItems().get(columnItem.getOwner());
-                            // if (!tableItem.isSubQuery()) {
-                            List<FieldItem> fieldItems = columnFields.computeIfAbsent(
-                                columnItem.getOwner() + "." + columnItem.getColumnName(),
-                                k -> new ArrayList<>());
-                            fieldItems.add(fieldItem);
-                            // } else {
-                            // tableItem.getSubQueryFields().forEach(subQueryField -> {
-                            // List<FieldItem> fieldItems = columnFields.computeIfAbsent(
-                            // columnItem.getOwner() + "." + subQueryField.getColumn().getColumnName(),
-                            // k -> new ArrayList<>());
-                            // fieldItems.add(fieldItem);
-                            // });
-                            // }
-                        }));
-                }
-            }
-        }
-        return columnFields;
-    }
-
-    public boolean isAllFieldsSimple() {
-        if (allFieldsSimple == null) {
-            synchronized (SchemaItem.class) {
-                if (allFieldsSimple == null) {
-                    allFieldsSimple = true;
-
-                    for (FieldItem fieldItem : getSelectFields().values()) {
-                        if (fieldItem.isMethod() || fieldItem.isBinaryOp()) {
-                            allFieldsSimple = false;
-                            break;
-                        }
-                    }
-                }
-            }
-        }
-
-        return allFieldsSimple;
-    }
-
-    public TableItem getMainTable() {
-        if (!aliasTableItems.isEmpty()) {
-            return aliasTableItems.values().iterator().next();
-        } else {
-            return null;
-        }
-    }
-
-    public FieldItem getIdFieldItem(ESMapping mapping) {
-        if (mapping.get_id() != null) {
-            return getSelectFields().get(mapping.get_id());
-        } else {
-            return getSelectFields().get(mapping.getPk());
-        }
-    }
-
-    public static class TableItem {
-
-        private SchemaItem                               schemaItem;
-
-        private String                                   schema;
-        private String                                   tableName;
-        private String                                   alias;
-        private String                                   subQuerySql;
-        private List<FieldItem>                          subQueryFields = new ArrayList<>();
-        private List<RelationFieldsPair>                 relationFields = new ArrayList<>();
-
-        private boolean                                  main;
-        private boolean                                  subQuery;
-
-        private volatile Map<FieldItem, List<FieldItem>> relationTableFields;               // main-table select fields keyed by this table's join-condition fields
-        private volatile List<FieldItem>                 relationSelectFieldItems;          // select fields of the main table that belong to this child table
-
-        public TableItem(SchemaItem schemaItem){
-            this.schemaItem = schemaItem;
-        }
-
-        public SchemaItem getSchemaItem() {
-            return schemaItem;
-        }
-
-        public void setSchemaItem(SchemaItem schemaItem) {
-            this.schemaItem = schemaItem;
-        }
-
-        public String getSchema() {
-            return schema;
-        }
-
-        public void setSchema(String schema) {
-            this.schema = schema;
-        }
-
-        public String getTableName() {
-            return tableName;
-        }
-
-        public void setTableName(String tableName) {
-            this.tableName = tableName;
-        }
-
-        public String getAlias() {
-            return alias;
-        }
-
-        public void setAlias(String alias) {
-            this.alias = alias;
-        }
-
-        public String getSubQuerySql() {
-            return subQuerySql;
-        }
-
-        public void setSubQuerySql(String subQuerySql) {
-            this.subQuerySql = subQuerySql;
-        }
-
-        public boolean isMain() {
-            return main;
-        }
-
-        public void setMain(boolean main) {
-            this.main = main;
-        }
-
-        public boolean isSubQuery() {
-            return subQuery;
-        }
-
-        public void setSubQuery(boolean subQuery) {
-            this.subQuery = subQuery;
-        }
-
-        public List<FieldItem> getSubQueryFields() {
-            return subQueryFields;
-        }
-
-        public void setSubQueryFields(List<FieldItem> subQueryFields) {
-            this.subQueryFields = subQueryFields;
-        }
-
-        public List<RelationFieldsPair> getRelationFields() {
-            return relationFields;
-        }
-
-        public void setRelationFields(List<RelationFieldsPair> relationFields) {
-            this.relationFields = relationFields;
-        }
-
-        public Map<FieldItem, List<FieldItem>> getRelationTableFields() {
-            if (relationTableFields == null) {
-                synchronized (SchemaItem.class) {
-                    if (relationTableFields == null) {
-                        relationTableFields = new LinkedHashMap<>();
-
-                        getRelationFields().forEach(relationFieldsPair -> {
-                            FieldItem leftFieldItem = relationFieldsPair.getLeftFieldItem();
-                            FieldItem rightFieldItem = relationFieldsPair.getRightFieldItem();
-                            FieldItem currentTableRelField = null;
-                            if (getAlias().equals(leftFieldItem.getOwner())) {
-                                currentTableRelField = leftFieldItem;
-                            } else if (getAlias().equals(rightFieldItem.getOwner())) {
-                                currentTableRelField = rightFieldItem;
-                            }
-
-                            if (currentTableRelField != null) {
-                                List<FieldItem> selectFieldItem = getSchemaItem().getColumnFields()
-                                    .get(leftFieldItem.getOwner() + "." + leftFieldItem.getColumn().getColumnName());
-                                if (selectFieldItem != null && !selectFieldItem.isEmpty()) {
-                                    relationTableFields.put(currentTableRelField, selectFieldItem);
-                                } else {
-                                    selectFieldItem = getSchemaItem().getColumnFields()
-                                        .get(rightFieldItem.getOwner() + "."
-                                             + rightFieldItem.getColumn().getColumnName());
-                                    if (selectFieldItem != null && !selectFieldItem.isEmpty()) {
-                                        relationTableFields.put(currentTableRelField, selectFieldItem);
-                                    } else {
-                                        throw new UnsupportedOperationException(
-                                            "Relation condition column must in select columns.");
-                                    }
-                                }
-                            }
-                        });
-                    }
-                }
-            }
-            return relationTableFields;
-        }
-
-        public List<FieldItem> getRelationSelectFieldItems() {
-            if (relationSelectFieldItems == null) {
-                synchronized (SchemaItem.class) {
-                    if (relationSelectFieldItems == null) {
-                        relationSelectFieldItems = new ArrayList<>();
-                        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
-                            if (fieldItem.getOwners().contains(getAlias())) {
-                                relationSelectFieldItems.add(fieldItem);
-                            }
-                        }
-                    }
-                }
-            }
-            return relationSelectFieldItems;
-        }
-    }
-
-    public static class RelationFieldsPair {
-
-        private FieldItem leftFieldItem;
-        private FieldItem rightFieldItem;
-
-        public RelationFieldsPair(FieldItem leftFieldItem, FieldItem rightFieldItem){
-            this.leftFieldItem = leftFieldItem;
-            this.rightFieldItem = rightFieldItem;
-        }
-
-        public FieldItem getLeftFieldItem() {
-            return leftFieldItem;
-        }
-
-        public void setLeftFieldItem(FieldItem leftFieldItem) {
-            this.leftFieldItem = leftFieldItem;
-        }
-
-        public FieldItem getRightFieldItem() {
-            return rightFieldItem;
-        }
-
-        public void setRightFieldItem(FieldItem rightFieldItem) {
-            this.rightFieldItem = rightFieldItem;
-        }
-    }
-
-    public static class FieldItem {
-
-        private String           fieldName;
-        private List<ColumnItem> columnItems = new ArrayList<>();
-        private List<String>     owners      = new ArrayList<>();
-
-        private boolean          method;
-        private boolean          binaryOp;
-
-        public String getFieldName() {
-            return fieldName;
-        }
-
-        public void setFieldName(String fieldName) {
-            this.fieldName = fieldName;
-        }
-
-        public List<ColumnItem> getColumnItems() {
-            return columnItems;
-        }
-
-        public void setColumnItems(List<ColumnItem> columnItems) {
-            this.columnItems = columnItems;
-        }
-
-        public boolean isMethod() {
-            return method;
-        }
-
-        public void setMethod(boolean method) {
-            this.method = method;
-        }
-
-        public boolean isBinaryOp() {
-            return binaryOp;
-        }
-
-        public void setBinaryOp(boolean binaryOp) {
-            this.binaryOp = binaryOp;
-        }
-
-        public List<String> getOwners() {
-            return owners;
-        }
-
-        public void setOwners(List<String> owners) {
-            this.owners = owners;
-        }
-
-        public void addColumn(ColumnItem columnItem) {
-            columnItems.add(columnItem);
-        }
-
-        public ColumnItem getColumn() {
-            if (!columnItems.isEmpty()) {
-                return columnItems.get(0);
-            } else {
-                return null;
-            }
-        }
-
-        public String getOwner() {
-            if (!owners.isEmpty()) {
-                return owners.get(0);
-            } else {
-                return null;
-            }
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-
-            FieldItem fieldItem = (FieldItem) o;
-
-            return fieldName != null ? fieldName.equals(fieldItem.fieldName) : fieldItem.fieldName == null;
-        }
-
-        @Override
-        public int hashCode() {
-            return fieldName != null ? fieldName.hashCode() : 0;
-        }
-    }
-
-    public static class ColumnItem {
-
-        private String owner;
-        private String columnName;
-
-        public String getOwner() {
-            return owner;
-        }
-
-        public void setOwner(String owner) {
-            this.owner = owner;
-        }
-
-        public String getColumnName() {
-            return columnName;
-        }
-
-        public void setColumnName(String columnName) {
-            this.columnName = columnName;
-        }
-    }
-}
+package com.alibaba.otter.canal.client.adapter.es.config;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
+
+/**
+ * View of the ES mapping configuration
+ *
+ * @author rewerma 2018-11-01
+ * @version 1.0.0
+ */
+public class SchemaItem {
+
+    private Map<String, TableItem>                aliasTableItems = new LinkedHashMap<>(); // table name per alias
+    private Map<String, FieldItem>                selectFields    = new LinkedHashMap<>(); // select fields
+    private String                                sql;
+
+    private volatile Map<String, List<TableItem>> tableItemAliases;
+    private volatile Map<String, List<FieldItem>> columnFields;
+    private volatile Boolean                      allFieldsSimple;
+
+    public void init() {
+        this.getTableItemAliases();
+        this.getColumnFields();
+        this.isAllFieldsSimple();
+        aliasTableItems.values().forEach(tableItem -> {
+            tableItem.getRelationTableFields();
+            tableItem.getRelationSelectFieldItems();
+        });
+    }
+
+    public Map<String, TableItem> getAliasTableItems() {
+        return aliasTableItems;
+    }
+
+    public void setAliasTableItems(Map<String, TableItem> aliasTableItems) {
+        this.aliasTableItems = aliasTableItems;
+    }
+
+    public String getSql() {
+        return sql;
+    }
+
+    public void setSql(String sql) {
+        this.sql = sql;
+    }
+
+    public Map<String, FieldItem> getSelectFields() {
+        return selectFields;
+    }
+
+    public void setSelectFields(Map<String, FieldItem> selectFields) {
+        this.selectFields = selectFields;
+    }
+
+    public Map<String, List<TableItem>> getTableItemAliases() {
+        if (tableItemAliases == null) {
+            synchronized (SchemaItem.class) {
+                if (tableItemAliases == null) {
+                    tableItemAliases = new LinkedHashMap<>();
+                    aliasTableItems.forEach((alias, tableItem) -> {
+                        List<TableItem> aliases = tableItemAliases
+                            .computeIfAbsent(tableItem.getTableName().toLowerCase(), k -> new ArrayList<>());
+                        aliases.add(tableItem);
+                    });
+                }
+            }
+        }
+        return tableItemAliases;
+    }
+
+    public Map<String, List<FieldItem>> getColumnFields() {
+        if (columnFields == null) {
+            synchronized (SchemaItem.class) {
+                if (columnFields == null) {
+                    columnFields = new LinkedHashMap<>();
+                    getSelectFields()
+                        .forEach((fieldName, fieldItem) -> fieldItem.getColumnItems().forEach(columnItem -> {
+                            // TableItem tableItem = getAliasTableItems().get(columnItem.getOwner());
+                            // if (!tableItem.isSubQuery()) {
+                            List<FieldItem> fieldItems = columnFields.computeIfAbsent(
+                                columnItem.getOwner() + "." + columnItem.getColumnName(),
+                                k -> new ArrayList<>());
+                            fieldItems.add(fieldItem);
+                            // } else {
+                            // tableItem.getSubQueryFields().forEach(subQueryField -> {
+                            // List<FieldItem> fieldItems = columnFields.computeIfAbsent(
+                            // columnItem.getOwner() + "." + subQueryField.getColumn().getColumnName(),
+                            // k -> new ArrayList<>());
+                            // fieldItems.add(fieldItem);
+                            // });
+                            // }
+                        }));
+                }
+            }
+        }
+        return columnFields;
+    }
+
+    public boolean isAllFieldsSimple() {
+        if (allFieldsSimple == null) {
+            synchronized (SchemaItem.class) {
+                if (allFieldsSimple == null) {
+                    allFieldsSimple = true;
+
+                    for (FieldItem fieldItem : getSelectFields().values()) {
+                        if (fieldItem.isMethod() || fieldItem.isBinaryOp()) {
+                            allFieldsSimple = false;
+                            break;
+                        }
+                    }
+                }
+            }
+        }
+
+        return allFieldsSimple;
+    }
+
+    public TableItem getMainTable() {
+        if (!aliasTableItems.isEmpty()) {
+            return aliasTableItems.values().iterator().next();
+        } else {
+            return null;
+        }
+    }
+
+    public FieldItem getIdFieldItem(ESMapping mapping) {
+        if (mapping.get_id() != null) {
+            return getSelectFields().get(mapping.get_id());
+        } else {
+            return getSelectFields().get(mapping.getPk());
+        }
+    }
+
+    public static class TableItem {
+
+        private SchemaItem                               schemaItem;
+
+        private String                                   schema;
+        private String                                   tableName;
+        private String                                   alias;
+        private String                                   subQuerySql;
+        private List<FieldItem>                          subQueryFields = new ArrayList<>();
+        private List<RelationFieldsPair>                 relationFields = new ArrayList<>();
+
+        private boolean                                  main;
+        private boolean                                  subQuery;
+
+        private volatile Map<FieldItem, List<FieldItem>> relationTableFields;               // main-table select fields keyed by this table's join-condition fields
+        private volatile List<FieldItem>                 relationSelectFieldItems;          // select fields of the main table that belong to this child table
+
+        public TableItem(SchemaItem schemaItem){
+            this.schemaItem = schemaItem;
+        }
+
+        public SchemaItem getSchemaItem() {
+            return schemaItem;
+        }
+
+        public void setSchemaItem(SchemaItem schemaItem) {
+            this.schemaItem = schemaItem;
+        }
+
+        public String getSchema() {
+            return schema;
+        }
+
+        public void setSchema(String schema) {
+            this.schema = schema;
+        }
+
+        public String getTableName() {
+            return tableName;
+        }
+
+        public void setTableName(String tableName) {
+            this.tableName = tableName;
+        }
+
+        public String getAlias() {
+            return alias;
+        }
+
+        public void setAlias(String alias) {
+            this.alias = alias;
+        }
+
+        public String getSubQuerySql() {
+            return subQuerySql;
+        }
+
+        public void setSubQuerySql(String subQuerySql) {
+            this.subQuerySql = subQuerySql;
+        }
+
+        public boolean isMain() {
+            return main;
+        }
+
+        public void setMain(boolean main) {
+            this.main = main;
+        }
+
+        public boolean isSubQuery() {
+            return subQuery;
+        }
+
+        public void setSubQuery(boolean subQuery) {
+            this.subQuery = subQuery;
+        }
+
+        public List<FieldItem> getSubQueryFields() {
+            return subQueryFields;
+        }
+
+        public void setSubQueryFields(List<FieldItem> subQueryFields) {
+            this.subQueryFields = subQueryFields;
+        }
+
+        public List<RelationFieldsPair> getRelationFields() {
+            return relationFields;
+        }
+
+        public void setRelationFields(List<RelationFieldsPair> relationFields) {
+            this.relationFields = relationFields;
+        }
+
+        public Map<FieldItem, List<FieldItem>> getRelationTableFields() {
+            if (relationTableFields == null) {
+                synchronized (SchemaItem.class) {
+                    if (relationTableFields == null) {
+                        relationTableFields = new LinkedHashMap<>();
+
+                        getRelationFields().forEach(relationFieldsPair -> {
+                            FieldItem leftFieldItem = relationFieldsPair.getLeftFieldItem();
+                            FieldItem rightFieldItem = relationFieldsPair.getRightFieldItem();
+                            FieldItem currentTableRelField = null;
+                            if (getAlias().equals(leftFieldItem.getOwner())) {
+                                currentTableRelField = leftFieldItem;
+                            } else if (getAlias().equals(rightFieldItem.getOwner())) {
+                                currentTableRelField = rightFieldItem;
+                            }
+
+                            if (currentTableRelField != null) {
+                                List<FieldItem> selectFieldItem = getSchemaItem().getColumnFields()
+                                    .get(leftFieldItem.getOwner() + "." + leftFieldItem.getColumn().getColumnName());
+                                if (selectFieldItem != null && !selectFieldItem.isEmpty()) {
+                                    relationTableFields.put(currentTableRelField, selectFieldItem);
+                                } else {
+                                    selectFieldItem = getSchemaItem().getColumnFields()
+                                        .get(rightFieldItem.getOwner() + "."
+                                             + rightFieldItem.getColumn().getColumnName());
+                                    if (selectFieldItem != null && !selectFieldItem.isEmpty()) {
+                                        relationTableFields.put(currentTableRelField, selectFieldItem);
+                                    } else {
+                                        throw new UnsupportedOperationException(
+                                            "Relation condition column must in select columns.");
+                                    }
+                                }
+                            }
+                        });
+                    }
+                }
+            }
+            return relationTableFields;
+        }
+
+        public List<FieldItem> getRelationSelectFieldItems() {
+            if (relationSelectFieldItems == null) {
+                synchronized (SchemaItem.class) {
+                    if (relationSelectFieldItems == null) {
+                        relationSelectFieldItems = new ArrayList<>();
+                        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
+                            if (fieldItem.getOwners().contains(getAlias())) {
+                                relationSelectFieldItems.add(fieldItem);
+                            }
+                        }
+                    }
+                }
+            }
+            return relationSelectFieldItems;
+        }
+    }
+
+    public static class RelationFieldsPair {
+
+        private FieldItem leftFieldItem;
+        private FieldItem rightFieldItem;
+
+        public RelationFieldsPair(FieldItem leftFieldItem, FieldItem rightFieldItem){
+            this.leftFieldItem = leftFieldItem;
+            this.rightFieldItem = rightFieldItem;
+        }
+
+        public FieldItem getLeftFieldItem() {
+            return leftFieldItem;
+        }
+
+        public void setLeftFieldItem(FieldItem leftFieldItem) {
+            this.leftFieldItem = leftFieldItem;
+        }
+
+        public FieldItem getRightFieldItem() {
+            return rightFieldItem;
+        }
+
+        public void setRightFieldItem(FieldItem rightFieldItem) {
+            this.rightFieldItem = rightFieldItem;
+        }
+    }
+
+    public static class FieldItem {
+
+        private String           fieldName;
+        private List<ColumnItem> columnItems = new ArrayList<>();
+        private List<String>     owners      = new ArrayList<>();
+
+        private boolean          method;
+        private boolean          binaryOp;
+
+        public String getFieldName() {
+            return fieldName;
+        }
+
+        public void setFieldName(String fieldName) {
+            this.fieldName = fieldName;
+        }
+
+        public List<ColumnItem> getColumnItems() {
+            return columnItems;
+        }
+
+        public void setColumnItems(List<ColumnItem> columnItems) {
+            this.columnItems = columnItems;
+        }
+
+        public boolean isMethod() {
+            return method;
+        }
+
+        public void setMethod(boolean method) {
+            this.method = method;
+        }
+
+        public boolean isBinaryOp() {
+            return binaryOp;
+        }
+
+        public void setBinaryOp(boolean binaryOp) {
+            this.binaryOp = binaryOp;
+        }
+
+        public List<String> getOwners() {
+            return owners;
+        }
+
+        public void setOwners(List<String> owners) {
+            this.owners = owners;
+        }
+
+        public void addColumn(ColumnItem columnItem) {
+            columnItems.add(columnItem);
+        }
+
+        public ColumnItem getColumn() {
+            if (!columnItems.isEmpty()) {
+                return columnItems.get(0);
+            } else {
+                return null;
+            }
+        }
+
+        public String getOwner() {
+            if (!owners.isEmpty()) {
+                return owners.get(0);
+            } else {
+                return null;
+            }
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+
+            FieldItem fieldItem = (FieldItem) o;
+
+            return fieldName != null ? fieldName.equals(fieldItem.fieldName) : fieldItem.fieldName == null;
+        }
+
+        @Override
+        public int hashCode() {
+            return fieldName != null ? fieldName.hashCode() : 0;
+        }
+    }
+
+    public static class ColumnItem {
+
+        private String owner;
+        private String columnName;
+
+        public String getOwner() {
+            return owner;
+        }
+
+        public void setOwner(String owner) {
+            this.owner = owner;
+        }
+
+        public String getColumnName() {
+            return columnName;
+        }
+
+        public void setColumnName(String columnName) {
+            this.columnName = columnName;
+        }
+    }
+}
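A sketch of how a SchemaItem is assembled and initialized (illustrative only, not part of this commit; the table and field names are assumptions standing in for what the SQL parser would produce for "SELECT t.id FROM user t"). Calling init() forces every lazily computed view up front, so the caches are built before concurrent sync calls read them. Note that all of the double-checked accessors synchronize on SchemaItem.class, a class-wide lock, so first-time initialization is serialized across all SchemaItem instances:

import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem;
import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.ColumnItem;
import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.FieldItem;
import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.TableItem;

public class SchemaSketch {

    public static void main(String[] args) {
        SchemaItem schema = new SchemaItem();
        schema.setSql("SELECT t.id FROM user t");

        // One table item keyed by its alias.
        TableItem table = new TableItem(schema);
        table.setSchema("mytest"); // assumed database schema name
        table.setTableName("user");
        table.setAlias("t");
        table.setMain(true);
        schema.getAliasTableItems().put("t", table);

        // One select field, t.id, owned by alias "t".
        FieldItem id = new FieldItem();
        id.setFieldName("id");
        id.getOwners().add("t");
        ColumnItem col = new ColumnItem();
        col.setOwner("t");
        col.setColumnName("id");
        id.addColumn(col);
        schema.getSelectFields().put("id", id);

        schema.init(); // pre-computes aliases, column->field index, relation fields
        System.out.println(schema.getMainTable().getTableName()); // user
    }
}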

+ 526 - 526
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESTemplate.java

@@ -1,526 +1,526 @@
-package com.alibaba.otter.canal.client.adapter.es.support;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.TimeUnit;
-
-import javax.sql.DataSource;
-
-import com.alibaba.fastjson.JSON;
-import org.elasticsearch.action.bulk.BulkItemResponse;
-import org.elasticsearch.action.bulk.BulkRequestBuilder;
-import org.elasticsearch.action.bulk.BulkResponse;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.client.transport.TransportClient;
-import org.elasticsearch.cluster.metadata.MappingMetaData;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.index.query.BoolQueryBuilder;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.index.reindex.BulkByScrollResponse;
-import org.elasticsearch.index.reindex.UpdateByQueryAction;
-import org.elasticsearch.index.reindex.UpdateByQueryRequestBuilder;
-import org.elasticsearch.rest.RestStatus;
-import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptType;
-import org.elasticsearch.search.SearchHit;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.util.CollectionUtils;
-
-import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
-import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
-import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem;
-import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.ColumnItem;
-import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.FieldItem;
-import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
-
-/**
- * ES operation template
- *
- * @author rewerma 2018-11-01
- * @version 1.0.0
- */
-public class ESTemplate {
-
-    private static final Logger logger         = LoggerFactory.getLogger(ESTemplate.class);
-
-    private static final int    MAX_BATCH_SIZE = 1000;
-
-    private TransportClient     transportClient;
-
-    public ESTemplate(TransportClient transportClient){
-        this.transportClient = transportClient;
-    }
-
-    /**
-     * Insert data
-     * 
-     * @param mapping
-     * @param pkVal
-     * @param esFieldData
-     * @return
-     */
-    public boolean insert(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
-        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
-        if (mapping.get_id() != null) {
-            bulkRequestBuilder
-                .add(transportClient.prepareIndex(mapping.get_index(), mapping.get_type(), pkVal.toString())
-                    .setSource(esFieldData));
-        } else {
-            SearchResponse response = transportClient.prepareSearch(mapping.get_index())
-                .setTypes(mapping.get_type())
-                .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
-                .setSize(MAX_BATCH_SIZE)
-                .get();
-            for (SearchHit hit : response.getHits()) {
-                bulkRequestBuilder
-                    .add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), hit.getId()));
-            }
-            bulkRequestBuilder
-                .add(transportClient.prepareIndex(mapping.get_index(), mapping.get_type()).setSource(esFieldData));
-        }
-        return commitBulkRequest(bulkRequestBuilder);
-    }
-
-    /**
-     * Update data by primary key
-     * 
-     * @param mapping
-     * @param pkVal
-     * @param esFieldData
-     * @return
-     */
-    public boolean update(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
-        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
-        append4Update(bulkRequestBuilder, mapping, pkVal, esFieldData);
-        return commitBulkRequest(bulkRequestBuilder);
-    }
-
-    public void append4Update(BulkRequestBuilder bulkRequestBuilder, ESMapping mapping, Object pkVal,
-                              Map<String, Object> esFieldData) {
-        if (mapping.get_id() != null) {
-            bulkRequestBuilder
-                .add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
-                    .setDoc(esFieldData));
-        } else {
-            SearchResponse response = transportClient.prepareSearch(mapping.get_index())
-                .setTypes(mapping.get_type())
-                .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
-                .setSize(MAX_BATCH_SIZE)
-                .get();
-            for (SearchHit hit : response.getHits()) {
-                bulkRequestBuilder
-                    .add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), hit.getId())
-                        .setDoc(esFieldData));
-            }
-        }
-    }
-
-    /**
-     * Update by query.
-     *
-     * @param config ES sync config
-     * @param paramsTmp query condition fields and values
-     * @param esFieldData field values to update
-     * @return true if the update succeeded
-     */
-    public boolean updateByQuery(ESSyncConfig config, Map<String, Object> paramsTmp, Map<String, Object> esFieldData) {
-        if (paramsTmp.isEmpty()) {
-            return false;
-        }
-        ESMapping mapping = config.getEsMapping();
-        BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();
-        paramsTmp.forEach((fieldName, value) -> queryBuilder.must(QueryBuilders.termsQuery(fieldName, value)));
-
-        SearchResponse response = transportClient.prepareSearch(mapping.get_index())
-            .setTypes(mapping.get_type())
-            .setSize(0)
-            .setQuery(queryBuilder)
-            .get();
-        long count = response.getHits().getTotalHits();
-        // If more than MAX_BATCH_SIZE docs match, query the source SQL and update in batches
-        if (count > MAX_BATCH_SIZE) {
-            BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
-
-            DataSource ds = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
-            // Update via the source SQL query
-            StringBuilder sql = new StringBuilder("SELECT * FROM (" + mapping.getSql() + ") _v WHERE ");
-            paramsTmp.forEach(
-                (fieldName, value) -> sql.append("_v.").append(fieldName).append("=").append(value).append(" AND "));
-            int len = sql.length();
-            sql.delete(len - 4, len);
-            ESSyncUtil.sqlRS(ds, sql.toString(), rs -> {
-                int exeCount = 1;
-                try {
-                    BulkRequestBuilder bulkRequestBuilderTmp = bulkRequestBuilder;
-                    while (rs.next()) {
-                        Object idVal = getIdValFromRS(mapping, rs);
-                        append4Update(bulkRequestBuilderTmp, mapping, idVal, esFieldData);
-
-                        if (exeCount % mapping.getCommitBatch() == 0 && bulkRequestBuilderTmp.numberOfActions() > 0) {
-                            commitBulkRequest(bulkRequestBuilderTmp);
-                            bulkRequestBuilderTmp = transportClient.prepareBulk();
-                        }
-                        exeCount++;
-                    }
-
-                    if (bulkRequestBuilderTmp.numberOfActions() > 0) {
-                        commitBulkRequest(bulkRequestBuilderTmp);
-                    }
-                } catch (Exception e) {
-                    throw new RuntimeException(e);
-                }
-                return 0;
-            });
-            return true;
-        } else {
-            return updateByQuery(mapping, queryBuilder, esFieldData, 1);
-        }
-    }
-
-    private boolean updateByQuery(ESMapping mapping, QueryBuilder queryBuilder, Map<String, Object> esFieldData,
-                                  int counter) {
-        if (CollectionUtils.isEmpty(esFieldData)) {
-            return true;
-        }
-
-        StringBuilder sb = new StringBuilder();
-        esFieldData.forEach((key, value) -> {
-            if (value instanceof Map) {
-                HashMap mapValue = (HashMap) value;
-                if (mapValue.containsKey("lon") && mapValue.containsKey("lat") && mapValue.size() == 2) {
-                    sb.append("ctx._source")
-                        .append("['")
-                        .append(key)
-                        .append("']")
-                        .append(" = [")
-                        .append(mapValue.get("lon"))
-                        .append(", ")
-                        .append(mapValue.get("lat"))
-                        .append("];");
-                } else {
-                    sb.append("ctx._source").append("[\"").append(key).append("\"]").append(" = ");
-                    sb.append(JSON.toJSONString(value));
-                    sb.append(";");
-                }
-            } else if (value instanceof List) {
-                sb.append("ctx._source").append("[\"").append(key).append("\"]").append(" = ");
-                sb.append(JSON.toJSONString(value));
-                sb.append(";");
-            } else if (value instanceof String) {
-                sb.append("ctx._source")
-                    .append("['")
-                    .append(key)
-                    .append("']")
-                    .append(" = '")
-                    .append(value)
-                    .append("';");
-            } else {
-                sb.append("ctx._source").append("['").append(key).append("']").append(" = ").append(value).append(";");
-            }
-        });
-        String scriptLine = sb.toString();
-        if (logger.isTraceEnabled()) {
-            logger.trace(scriptLine);
-        }
-
-        UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(transportClient);
-        updateByQuery.source(mapping.get_index())
-            .abortOnVersionConflict(false)
-            .filter(queryBuilder)
-            .script(new Script(ScriptType.INLINE, "painless", scriptLine, Collections.emptyMap()));
-
-        BulkByScrollResponse response = updateByQuery.get();
-        if (logger.isTraceEnabled()) {
-            logger.trace("updateByQuery response: {}", response.getStatus());
-        }
-        if (!CollectionUtils.isEmpty(response.getSearchFailures())) {
-            logger.error("script update_for_search has search error: " + response.getBulkFailures());
-            return false;
-        }
-
-        if (!CollectionUtils.isEmpty(response.getBulkFailures())) {
-            logger.error("script update_for_search has update error: " + response.getBulkFailures());
-            return false;
-        }
-
-        if (response.getStatus().getVersionConflicts() > 0) {
-            if (counter >= 3) {
-                logger.error("第 {} 次执行updateByQuery, 依旧存在分片版本冲突,不再继续重试。", counter);
-                return false;
-            }
-            logger.warn("本次updateByQuery存在分片版本冲突,准备重新执行...");
-            try {
-                TimeUnit.SECONDS.sleep(1);
-            } catch (InterruptedException e) {
-                // ignore
-            }
-            return updateByQuery(mapping, queryBuilder, esFieldData, ++counter);
-        }
-
-        return true;
-    }
-
-    /**
-     * Delete a document by primary key.
-     *
-     * @param mapping ES mapping config
-     * @param pkVal primary key value
-     * @return true if the bulk commit succeeded
-     */
-    public boolean delete(ESMapping mapping, Object pkVal) {
-        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
-        if (mapping.get_id() != null) {
-            bulkRequestBuilder
-                .add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), pkVal.toString()));
-        } else {
-            SearchResponse response = transportClient.prepareSearch(mapping.get_index())
-                .setTypes(mapping.get_type())
-                .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
-                .setSize(MAX_BATCH_SIZE)
-                .get();
-            for (SearchHit hit : response.getHits()) {
-                bulkRequestBuilder
-                    .add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), hit.getId()));
-            }
-        }
-        return commitBulkRequest(bulkRequestBuilder);
-    }
-
-    /**
-     * Commit the bulk request.
-     *
-     * @param bulkRequestBuilder bulk request to execute
-     * @return true if every item succeeded
-     */
-    private static boolean commitBulkRequest(BulkRequestBuilder bulkRequestBuilder) {
-        if (bulkRequestBuilder.numberOfActions() > 0) {
-            BulkResponse response = bulkRequestBuilder.execute().actionGet();
-            if (response.hasFailures()) {
-                for (BulkItemResponse itemResponse : response.getItems()) {
-                    if (!itemResponse.isFailed()) {
-                        continue;
-                    }
-
-                    if (itemResponse.getFailure().getStatus() == RestStatus.NOT_FOUND) {
-                        logger.warn(itemResponse.getFailureMessage());
-                    } else {
-                        logger.error("ES sync commit error: {}", itemResponse.getFailureMessage());
-                    }
-                }
-            }
-
-            return !response.hasFailures();
-        }
-        return true;
-    }
-
-    public Object getValFromRS(ESMapping mapping, ResultSet resultSet, String fieldName,
-                               String columnName) throws SQLException {
-        String esType = getEsType(mapping, fieldName);
-
-        Object value = resultSet.getObject(columnName);
-        if (value instanceof Boolean) {
-            if (!"boolean".equals(esType)) {
-                value = resultSet.getByte(columnName);
-            }
-        }
-
-        // Object-type field
-        if (mapping.getObjFields().containsKey(fieldName)) {
-            return ESSyncUtil.convertToEsObj(value, mapping.getObjFields().get(fieldName));
-        } else {
-            return ESSyncUtil.typeConvert(value, esType);
-        }
-    }
-
-    public Object getESDataFromRS(ESMapping mapping, ResultSet resultSet,
-                                  Map<String, Object> esFieldData) throws SQLException {
-        SchemaItem schemaItem = mapping.getSchemaItem();
-        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
-        Object resultIdVal = null;
-        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
-            Object value = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName());
-
-            if (fieldItem.getFieldName().equals(idFieldName)) {
-                resultIdVal = value;
-            }
-
-            if (!fieldItem.getFieldName().equals(mapping.get_id())
-                && !mapping.getSkips().contains(fieldItem.getFieldName())) {
-                esFieldData.put(fieldItem.getFieldName(), value);
-            }
-        }
-        return resultIdVal;
-    }
-
-    public Object getIdValFromRS(ESMapping mapping, ResultSet resultSet) throws SQLException {
-        SchemaItem schemaItem = mapping.getSchemaItem();
-        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
-        Object resultIdVal = null;
-        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
-            Object value = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName());
-
-            if (fieldItem.getFieldName().equals(idFieldName)) {
-                resultIdVal = value;
-                break;
-            }
-        }
-        return resultIdVal;
-    }
-
-    public Object getESDataFromRS(ESMapping mapping, ResultSet resultSet, Map<String, Object> dmlOld,
-                                  Map<String, Object> esFieldData) throws SQLException {
-        SchemaItem schemaItem = mapping.getSchemaItem();
-        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
-        Object resultIdVal = null;
-        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
-            if (fieldItem.getFieldName().equals(idFieldName)) {
-                resultIdVal = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName());
-            }
-
-            for (ColumnItem columnItem : fieldItem.getColumnItems()) {
-                if (dmlOld.containsKey(columnItem.getColumnName())
-                    && !mapping.getSkips().contains(fieldItem.getFieldName())) {
-                    esFieldData.put(fieldItem.getFieldName(),
-                        getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName()));
-                    break;
-                }
-            }
-        }
-        return resultIdVal;
-    }
-
-    public Object getValFromData(ESMapping mapping, Map<String, Object> dmlData, String fieldName, String columnName) {
-        String esType = getEsType(mapping, fieldName);
-        Object value = dmlData.get(columnName);
-        if (value instanceof Byte) {
-            if ("boolean".equals(esType)) {
-                value = ((Byte) value).intValue() != 0;
-            }
-        }
-
-        // Object-type field
-        if (mapping.getObjFields().containsKey(fieldName)) {
-            return ESSyncUtil.convertToEsObj(value, mapping.getObjFields().get(fieldName));
-        } else {
-            return ESSyncUtil.typeConvert(value, esType);
-        }
-    }
-
-    /**
-     * Convert DML data into ES field data.
-     *
-     * @param mapping mapping config
-     * @param dmlData DML data
-     * @param esFieldData ES field data (output)
-     * @return the id value
-     */
-    public Object getESDataFromDmlData(ESMapping mapping, Map<String, Object> dmlData,
-                                       Map<String, Object> esFieldData) {
-        SchemaItem schemaItem = mapping.getSchemaItem();
-        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
-        Object resultIdVal = null;
-        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
-            String columnName = fieldItem.getColumnItems().iterator().next().getColumnName();
-            Object value = getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName);
-
-            if (fieldItem.getFieldName().equals(idFieldName)) {
-                resultIdVal = value;
-            }
-
-            if (!fieldItem.getFieldName().equals(mapping.get_id())
-                && !mapping.getSkips().contains(fieldItem.getFieldName())) {
-                esFieldData.put(fieldItem.getFieldName(), value);
-            }
-        }
-        return resultIdVal;
-    }
-
-    /**
-     * Convert DML data and its old values into ES field data.
-     *
-     * @param mapping mapping config
-     * @param dmlData DML data
-     * @param dmlOld old values of the changed columns
-     * @param esFieldData ES field data (output)
-     * @return the id value
-     */
-    public Object getESDataFromDmlData(ESMapping mapping, Map<String, Object> dmlData, Map<String, Object> dmlOld,
-                                       Map<String, Object> esFieldData) {
-        SchemaItem schemaItem = mapping.getSchemaItem();
-        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
-        Object resultIdVal = null;
-        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
-            String columnName = fieldItem.getColumnItems().iterator().next().getColumnName();
-
-            if (fieldItem.getFieldName().equals(idFieldName)) {
-                resultIdVal = getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName);
-            }
-
-            if (dmlOld.get(columnName) != null && !mapping.getSkips().contains(fieldItem.getFieldName())) {
-                esFieldData.put(fieldItem.getFieldName(),
-                    getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName));
-            }
-        }
-        return resultIdVal;
-    }
-
-    /**
-     * Local cache of ES field types
-     */
-    private static ConcurrentMap<String, Map<String, String>> esFieldTypes = new ConcurrentHashMap<>();
-
-    /**
-     * Get a field's type from the ES mapping.
-     *
-     * @param mapping mapping config
-     * @param fieldName field name
-     * @return the field type
-     */
-    @SuppressWarnings("unchecked")
-    private String getEsType(ESMapping mapping, String fieldName) {
-        String key = mapping.get_index() + "-" + mapping.get_type();
-        Map<String, String> fieldType = esFieldTypes.get(key);
-        if (fieldType == null) {
-            ImmutableOpenMap<String, MappingMetaData> mappings;
-            try {
-                mappings = transportClient.admin()
-                    .cluster()
-                    .prepareState()
-                    .execute()
-                    .actionGet()
-                    .getState()
-                    .getMetaData()
-                    .getIndices()
-                    .get(mapping.get_index())
-                    .getMappings();
-            } catch (NullPointerException e) {
-                throw new IllegalArgumentException("Not found the mapping info of index: " + mapping.get_index());
-            }
-            MappingMetaData mappingMetaData = mappings.get(mapping.get_type());
-            if (mappingMetaData == null) {
-                throw new IllegalArgumentException("Not found the mapping info of index: " + mapping.get_index());
-            }
-
-            fieldType = new LinkedHashMap<>();
-
-            Map<String, Object> sourceMap = mappingMetaData.getSourceAsMap();
-            Map<String, Object> esMapping = (Map<String, Object>) sourceMap.get("properties");
-            for (Map.Entry<String, Object> entry : esMapping.entrySet()) {
-                Map<String, Object> value = (Map<String, Object>) entry.getValue();
-                if (value.containsKey("properties")) {
-                    fieldType.put(entry.getKey(), "object");
-                } else {
-                    fieldType.put(entry.getKey(), (String) value.get("type"));
-                }
-            }
-            esFieldTypes.put(key, fieldType);
-        }
-
-        return fieldType.get(fieldName);
-    }
-}
+package com.alibaba.otter.canal.client.adapter.es.support;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.TimeUnit;
+
+import javax.sql.DataSource;
+
+import com.alibaba.fastjson.JSON;
+import org.elasticsearch.action.bulk.BulkItemResponse;
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.transport.TransportClient;
+import org.elasticsearch.cluster.metadata.MappingMetaData;
+import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.index.query.BoolQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.index.reindex.BulkByScrollResponse;
+import org.elasticsearch.index.reindex.UpdateByQueryAction;
+import org.elasticsearch.index.reindex.UpdateByQueryRequestBuilder;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptType;
+import org.elasticsearch.search.SearchHit;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.util.CollectionUtils;
+
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.ColumnItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.FieldItem;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+
+/**
+ * ES operation template
+ *
+ * @author rewerma 2018-11-01
+ * @version 1.0.0
+ */
+public class ESTemplate {
+
+    private static final Logger logger         = LoggerFactory.getLogger(ESTemplate.class);
+
+    private static final int    MAX_BATCH_SIZE = 1000;
+
+    private TransportClient     transportClient;
+
+    public ESTemplate(TransportClient transportClient){
+        this.transportClient = transportClient;
+    }
+
+    /**
+     * Insert a document.
+     *
+     * @param mapping ES mapping config
+     * @param pkVal primary key value
+     * @param esFieldData field values to index
+     * @return true if the bulk commit succeeded
+     */
+    public boolean insert(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
+        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+        if (mapping.get_id() != null) {
+            bulkRequestBuilder
+                .add(transportClient.prepareIndex(mapping.get_index(), mapping.get_type(), pkVal.toString())
+                    .setSource(esFieldData));
+        } else {
+            SearchResponse response = transportClient.prepareSearch(mapping.get_index())
+                .setTypes(mapping.get_type())
+                .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
+                .setSize(MAX_BATCH_SIZE)
+                .get();
+            for (SearchHit hit : response.getHits()) {
+                bulkRequestBuilder
+                    .add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), hit.getId()));
+            }
+            bulkRequestBuilder
+                .add(transportClient.prepareIndex(mapping.get_index(), mapping.get_type()).setSource(esFieldData));
+        }
+        return commitBulkRequest(bulkRequestBuilder);
+    }
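For context, a minimal usage sketch of this template, assuming an Elasticsearch 6.x transport client; the cluster name, address and document values are hypothetical, and the ESMapping would come from an ESSyncConfig loaded by the adapter:

    import java.net.InetAddress;
    import java.util.HashMap;
    import java.util.Map;

    import org.elasticsearch.client.transport.TransportClient;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.transport.TransportAddress;
    import org.elasticsearch.transport.client.PreBuiltTransportClient;

    import com.alibaba.otter.canal.client.adapter.es.support.ESTemplate;

    public class ESTemplateUsageSketch {

        public static void main(String[] args) throws Exception {
            // Hypothetical cluster coordinates; adjust to the target environment.
            Settings settings = Settings.builder().put("cluster.name", "es-cluster").build();
            TransportClient client = new PreBuiltTransportClient(settings)
                .addTransportAddress(new TransportAddress(InetAddress.getByName("127.0.0.1"), 9300));

            ESTemplate esTemplate = new ESTemplate(client);
            Map<String, Object> doc = new HashMap<>();
            doc.put("name", "alice");
            // esTemplate.insert(mapping, 1L, doc); // "mapping" is the ESMapping of an ESSyncConfig
            client.close();
        }
    }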
+
+    /**
+     * Update a document by primary key.
+     *
+     * @param mapping ES mapping config
+     * @param pkVal primary key value
+     * @param esFieldData field values to update
+     * @return true if the bulk commit succeeded
+     */
+    public boolean update(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
+        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+        append4Update(bulkRequestBuilder, mapping, pkVal, esFieldData);
+        return commitBulkRequest(bulkRequestBuilder);
+    }
+
+    public void append4Update(BulkRequestBuilder bulkRequestBuilder, ESMapping mapping, Object pkVal,
+                              Map<String, Object> esFieldData) {
+        if (mapping.get_id() != null) {
+            bulkRequestBuilder
+                .add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
+                    .setDoc(esFieldData));
+        } else {
+            SearchResponse response = transportClient.prepareSearch(mapping.get_index())
+                .setTypes(mapping.get_type())
+                .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
+                .setSize(MAX_BATCH_SIZE)
+                .get();
+            for (SearchHit hit : response.getHits()) {
+                bulkRequestBuilder
+                    .add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), hit.getId())
+                        .setDoc(esFieldData));
+            }
+        }
+    }
+
+    /**
+     * Update by query.
+     *
+     * @param config ES sync config
+     * @param paramsTmp query condition fields and values
+     * @param esFieldData field values to update
+     * @return true if the update succeeded
+     */
+    public boolean updateByQuery(ESSyncConfig config, Map<String, Object> paramsTmp, Map<String, Object> esFieldData) {
+        if (paramsTmp.isEmpty()) {
+            return false;
+        }
+        ESMapping mapping = config.getEsMapping();
+        BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();
+        paramsTmp.forEach((fieldName, value) -> queryBuilder.must(QueryBuilders.termsQuery(fieldName, value)));
+
+        SearchResponse response = transportClient.prepareSearch(mapping.get_index())
+            .setTypes(mapping.get_type())
+            .setSize(0)
+            .setQuery(queryBuilder)
+            .get();
+        long count = response.getHits().getTotalHits();
+        // If more than MAX_BATCH_SIZE docs match, query the source SQL and update in batches
+        if (count > MAX_BATCH_SIZE) {
+            BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+
+            DataSource ds = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+            // Update via the source SQL query
+            StringBuilder sql = new StringBuilder("SELECT * FROM (" + mapping.getSql() + ") _v WHERE ");
+            paramsTmp.forEach(
+                (fieldName, value) -> sql.append("_v.").append(fieldName).append("=").append(value).append(" AND "));
+            int len = sql.length();
+            sql.delete(len - 4, len);
+            ESSyncUtil.sqlRS(ds, sql.toString(), rs -> {
+                int exeCount = 1;
+                try {
+                    BulkRequestBuilder bulkRequestBuilderTmp = bulkRequestBuilder;
+                    while (rs.next()) {
+                        Object idVal = getIdValFromRS(mapping, rs);
+                        append4Update(bulkRequestBuilderTmp, mapping, idVal, esFieldData);
+
+                        if (exeCount % mapping.getCommitBatch() == 0 && bulkRequestBuilderTmp.numberOfActions() > 0) {
+                            commitBulkRequest(bulkRequestBuilderTmp);
+                            bulkRequestBuilderTmp = transportClient.prepareBulk();
+                        }
+                        exeCount++;
+                    }
+
+                    if (bulkRequestBuilderTmp.numberOfActions() > 0) {
+                        commitBulkRequest(bulkRequestBuilderTmp);
+                    }
+                } catch (Exception e) {
+                    throw new RuntimeException(e);
+                }
+                return 0;
+            });
+            return true;
+        } else {
+            return updateByQuery(mapping, queryBuilder, esFieldData, 1);
+        }
+    }
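When the match count exceeds MAX_BATCH_SIZE, the branch above re-reads the rows from the source database instead of running update-by-query. For illustration, with mapping.getSql() = "SELECT id, name, role_id FROM user" and paramsTmp = {role_id: 5} (hypothetical values), the statement it executes is:

    SELECT * FROM (SELECT id, name, role_id FROM user) _v WHERE _v.role_id=5

Condition values are appended verbatim, without quoting, so this path effectively assumes numeric parameters.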
+
+    private boolean updateByQuery(ESMapping mapping, QueryBuilder queryBuilder, Map<String, Object> esFieldData,
+                                  int counter) {
+        if (CollectionUtils.isEmpty(esFieldData)) {
+            return true;
+        }
+
+        StringBuilder sb = new StringBuilder();
+        esFieldData.forEach((key, value) -> {
+            if (value instanceof Map) {
+                Map<?, ?> mapValue = (Map<?, ?>) value;
+                if (mapValue.containsKey("lon") && mapValue.containsKey("lat") && mapValue.size() == 2) {
+                    sb.append("ctx._source")
+                        .append("['")
+                        .append(key)
+                        .append("']")
+                        .append(" = [")
+                        .append(mapValue.get("lon"))
+                        .append(", ")
+                        .append(mapValue.get("lat"))
+                        .append("];");
+                } else {
+                    sb.append("ctx._source").append("[\"").append(key).append("\"]").append(" = ");
+                    sb.append(JSON.toJSONString(value));
+                    sb.append(";");
+                }
+            } else if (value instanceof List) {
+                sb.append("ctx._source").append("[\"").append(key).append("\"]").append(" = ");
+                sb.append(JSON.toJSONString(value));
+                sb.append(";");
+            } else if (value instanceof String) {
+                sb.append("ctx._source")
+                    .append("['")
+                    .append(key)
+                    .append("']")
+                    .append(" = '")
+                    .append(value)
+                    .append("';");
+            } else {
+                sb.append("ctx._source").append("['").append(key).append("']").append(" = ").append(value).append(";");
+            }
+        });
+        String scriptLine = sb.toString();
+        if (logger.isTraceEnabled()) {
+            logger.trace(scriptLine);
+        }
+
+        UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(transportClient);
+        updateByQuery.source(mapping.get_index())
+            .abortOnVersionConflict(false)
+            .filter(queryBuilder)
+            .script(new Script(ScriptType.INLINE, "painless", scriptLine, Collections.emptyMap()));
+
+        BulkByScrollResponse response = updateByQuery.get();
+        if (logger.isTraceEnabled()) {
+            logger.trace("updateByQuery response: {}", response.getStatus());
+        }
+        if (!CollectionUtils.isEmpty(response.getSearchFailures())) {
+            logger.error("script update_for_search has search error: " + response.getBulkFailures());
+            return false;
+        }
+
+        if (!CollectionUtils.isEmpty(response.getBulkFailures())) {
+            logger.error("script update_for_search has update error: " + response.getBulkFailures());
+            return false;
+        }
+
+        if (response.getStatus().getVersionConflicts() > 0) {
+            if (counter >= 3) {
+                logger.error("第 {} 次执行updateByQuery, 依旧存在分片版本冲突,不再继续重试。", counter);
+                return false;
+            }
+            logger.warn("本次updateByQuery存在分片版本冲突,准备重新执行...");
+            try {
+                TimeUnit.SECONDS.sleep(1);
+            } catch (InterruptedException e) {
+                // ignore
+            }
+            return updateByQuery(mapping, queryBuilder, esFieldData, ++counter);
+        }
+
+        return true;
+    }
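For reference, the script assembled above for esFieldData = {name: "bob", age: 30, location: {lon: 120.1, lat: 30.2}} (hypothetical values; field order follows map iteration) is:

    ctx._source['name'] = 'bob';ctx._source['age'] = 30;ctx._source['location'] = [120.1, 30.2];

String values are embedded between single quotes without escaping, so a value containing a quote would break the generated Painless script.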
+
+    /**
+     * Delete a document by primary key.
+     *
+     * @param mapping ES mapping config
+     * @param pkVal primary key value
+     * @return true if the bulk commit succeeded
+     */
+    public boolean delete(ESMapping mapping, Object pkVal) {
+        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+        if (mapping.get_id() != null) {
+            bulkRequestBuilder
+                .add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), pkVal.toString()));
+        } else {
+            SearchResponse response = transportClient.prepareSearch(mapping.get_index())
+                .setTypes(mapping.get_type())
+                .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
+                .setSize(MAX_BATCH_SIZE)
+                .get();
+            for (SearchHit hit : response.getHits()) {
+                bulkRequestBuilder
+                    .add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), hit.getId()));
+            }
+        }
+        return commitBulkRequest(bulkRequestBuilder);
+    }
+
+    /**
+     * Commit the bulk request.
+     *
+     * @param bulkRequestBuilder bulk request to execute
+     * @return true if every item succeeded
+     */
+    private static boolean commitBulkRequest(BulkRequestBuilder bulkRequestBuilder) {
+        if (bulkRequestBuilder.numberOfActions() > 0) {
+            BulkResponse response = bulkRequestBuilder.execute().actionGet();
+            if (response.hasFailures()) {
+                for (BulkItemResponse itemResponse : response.getItems()) {
+                    if (!itemResponse.isFailed()) {
+                        continue;
+                    }
+
+                    if (itemResponse.getFailure().getStatus() == RestStatus.NOT_FOUND) {
+                        logger.warn(itemResponse.getFailureMessage());
+                    } else {
+                        logger.error("ES sync commit error: {}", itemResponse.getFailureMessage());
+                    }
+                }
+            }
+
+            return !response.hasFailures();
+        }
+        return true;
+    }
+
+    public Object getValFromRS(ESMapping mapping, ResultSet resultSet, String fieldName,
+                               String columnName) throws SQLException {
+        String esType = getEsType(mapping, fieldName);
+
+        Object value = resultSet.getObject(columnName);
+        if (value instanceof Boolean) {
+            if (!"boolean".equals(esType)) {
+                value = resultSet.getByte(columnName);
+            }
+        }
+
+        // Object-type field
+        if (mapping.getObjFields().containsKey(fieldName)) {
+            return ESSyncUtil.convertToEsObj(value, mapping.getObjFields().get(fieldName));
+        } else {
+            return ESSyncUtil.typeConvert(value, esType);
+        }
+    }
+
+    public Object getESDataFromRS(ESMapping mapping, ResultSet resultSet,
+                                  Map<String, Object> esFieldData) throws SQLException {
+        SchemaItem schemaItem = mapping.getSchemaItem();
+        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
+        Object resultIdVal = null;
+        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
+            Object value = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName());
+
+            if (fieldItem.getFieldName().equals(idFieldName)) {
+                resultIdVal = value;
+            }
+
+            if (!fieldItem.getFieldName().equals(mapping.get_id())
+                && !mapping.getSkips().contains(fieldItem.getFieldName())) {
+                esFieldData.put(fieldItem.getFieldName(), value);
+            }
+        }
+        return resultIdVal;
+    }
+
+    public Object getIdValFromRS(ESMapping mapping, ResultSet resultSet) throws SQLException {
+        SchemaItem schemaItem = mapping.getSchemaItem();
+        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
+        Object resultIdVal = null;
+        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
+            Object value = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName());
+
+            if (fieldItem.getFieldName().equals(idFieldName)) {
+                resultIdVal = value;
+                break;
+            }
+        }
+        return resultIdVal;
+    }
+
+    public Object getESDataFromRS(ESMapping mapping, ResultSet resultSet, Map<String, Object> dmlOld,
+                                  Map<String, Object> esFieldData) throws SQLException {
+        SchemaItem schemaItem = mapping.getSchemaItem();
+        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
+        Object resultIdVal = null;
+        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
+            if (fieldItem.getFieldName().equals(idFieldName)) {
+                resultIdVal = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName());
+            }
+
+            for (ColumnItem columnItem : fieldItem.getColumnItems()) {
+                if (dmlOld.containsKey(columnItem.getColumnName())
+                    && !mapping.getSkips().contains(fieldItem.getFieldName())) {
+                    esFieldData.put(fieldItem.getFieldName(),
+                        getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName()));
+                    break;
+                }
+            }
+        }
+        return resultIdVal;
+    }
+
+    public Object getValFromData(ESMapping mapping, Map<String, Object> dmlData, String fieldName, String columnName) {
+        String esType = getEsType(mapping, fieldName);
+        Object value = dmlData.get(columnName);
+        if (value instanceof Byte) {
+            if ("boolean".equals(esType)) {
+                value = ((Byte) value).intValue() != 0;
+            }
+        }
+
+        // Object-type field
+        if (mapping.getObjFields().containsKey(fieldName)) {
+            return ESSyncUtil.convertToEsObj(value, mapping.getObjFields().get(fieldName));
+        } else {
+            return ESSyncUtil.typeConvert(value, esType);
+        }
+    }
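A one-line illustration of the Byte handling above: MySQL tinyint(1) columns typically arrive as a Byte, and when the ES mapping declares the field as "boolean" any non-zero byte becomes true.

    Object raw = (byte) 1;                            // e.g. a tinyint(1) value out of the DML data
    boolean asBoolean = ((Byte) raw).intValue() != 0; // -> true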
+
+    /**
+     * Convert DML data into ES field data.
+     *
+     * @param mapping mapping config
+     * @param dmlData DML data
+     * @param esFieldData ES field data (output)
+     * @return the id value
+     */
+    public Object getESDataFromDmlData(ESMapping mapping, Map<String, Object> dmlData,
+                                       Map<String, Object> esFieldData) {
+        SchemaItem schemaItem = mapping.getSchemaItem();
+        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
+        Object resultIdVal = null;
+        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
+            String columnName = fieldItem.getColumnItems().iterator().next().getColumnName();
+            Object value = getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName);
+
+            if (fieldItem.getFieldName().equals(idFieldName)) {
+                resultIdVal = value;
+            }
+
+            if (!fieldItem.getFieldName().equals(mapping.get_id())
+                && !mapping.getSkips().contains(fieldItem.getFieldName())) {
+                esFieldData.put(fieldItem.getFieldName(), value);
+            }
+        }
+        return resultIdVal;
+    }
+
+    /**
+     * Convert DML data and its old values into ES field data.
+     *
+     * @param mapping mapping config
+     * @param dmlData DML data
+     * @param dmlOld old values of the changed columns
+     * @param esFieldData ES field data (output)
+     * @return the id value
+     */
+    public Object getESDataFromDmlData(ESMapping mapping, Map<String, Object> dmlData, Map<String, Object> dmlOld,
+                                       Map<String, Object> esFieldData) {
+        SchemaItem schemaItem = mapping.getSchemaItem();
+        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
+        Object resultIdVal = null;
+        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
+            String columnName = fieldItem.getColumnItems().iterator().next().getColumnName();
+
+            if (fieldItem.getFieldName().equals(idFieldName)) {
+                resultIdVal = getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName);
+            }
+
+            if (dmlOld.get(columnName) != null && !mapping.getSkips().contains(fieldItem.getFieldName())) {
+                esFieldData.put(fieldItem.getFieldName(),
+                    getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName));
+            }
+        }
+        return resultIdVal;
+    }
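Concretely, given dmlData = {id: 1, name: "new", age: 30} and dmlOld = {name: "old"} (hypothetical values), only the changed column reaches the output:

    esFieldData  -> {name: "new"}  // untouched columns are skipped
    return value -> 1              // the id resolved from dmlData

so the resulting ES update stays a partial-document update.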
+
+    /**
+     * Local cache of ES field types
+     */
+    private static ConcurrentMap<String, Map<String, String>> esFieldTypes = new ConcurrentHashMap<>();
+
+    /**
+     * Get a field's type from the ES mapping.
+     *
+     * @param mapping mapping config
+     * @param fieldName field name
+     * @return the field type
+     */
+    @SuppressWarnings("unchecked")
+    private String getEsType(ESMapping mapping, String fieldName) {
+        String key = mapping.get_index() + "-" + mapping.get_type();
+        Map<String, String> fieldType = esFieldTypes.get(key);
+        if (fieldType == null) {
+            ImmutableOpenMap<String, MappingMetaData> mappings;
+            try {
+                mappings = transportClient.admin()
+                    .cluster()
+                    .prepareState()
+                    .execute()
+                    .actionGet()
+                    .getState()
+                    .getMetaData()
+                    .getIndices()
+                    .get(mapping.get_index())
+                    .getMappings();
+            } catch (NullPointerException e) {
+                throw new IllegalArgumentException("Not found the mapping info of index: " + mapping.get_index());
+            }
+            MappingMetaData mappingMetaData = mappings.get(mapping.get_type());
+            if (mappingMetaData == null) {
+                throw new IllegalArgumentException("Not found the mapping info of index: " + mapping.get_index());
+            }
+
+            fieldType = new LinkedHashMap<>();
+
+            Map<String, Object> sourceMap = mappingMetaData.getSourceAsMap();
+            Map<String, Object> esMapping = (Map<String, Object>) sourceMap.get("properties");
+            for (Map.Entry<String, Object> entry : esMapping.entrySet()) {
+                Map<String, Object> value = (Map<String, Object>) entry.getValue();
+                if (value.containsKey("properties")) {
+                    fieldType.put(entry.getKey(), "object");
+                } else {
+                    fieldType.put(entry.getKey(), (String) value.get("type"));
+                }
+            }
+            esFieldTypes.put(key, fieldType);
+        }
+
+        return fieldType.get(fieldName);
+    }
+}

+ 397 - 392
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseEtlService.java

@@ -1,392 +1,397 @@
-package com.alibaba.otter.canal.client.adapter.hbase.service;
-
-import java.sql.*;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.function.Function;
-
-import javax.sql.DataSource;
-
-import com.alibaba.otter.canal.client.adapter.support.Util;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig;
-import com.alibaba.otter.canal.client.adapter.hbase.support.*;
-import com.alibaba.otter.canal.client.adapter.support.EtlResult;
-import com.alibaba.otter.canal.client.adapter.support.JdbcTypeUtil;
-import com.google.common.base.Joiner;
-
-/**
- * HBase ETL service
- *
- * @author rewerma @ 2018-10-20
- * @version 1.0.0
- */
-public class HbaseEtlService {
-
-    private static Logger logger = LoggerFactory.getLogger(HbaseEtlService.class);
-
-
-    /**
-     * Create the HBase table.
-     *
-     * @param hbaseTemplate HBase template
-     * @param config mapping config
-     */
-    public static void createTable(HbaseTemplate hbaseTemplate, MappingConfig config) {
-        try {
-            // Create the HBase table if it does not exist yet
-            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
-            if (!hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
-                hbaseTemplate.createTable(hbaseMapping.getHbaseTable(), hbaseMapping.getFamily());
-            }
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * Import data.
-     *
-     * @param ds data source
-     * @param hbaseTemplate HBase template
-     * @param config mapping config
-     * @param params filter conditions
-     * @return import result
-     */
-    public static EtlResult importData(DataSource ds, HbaseTemplate hbaseTemplate, MappingConfig config,
-                                       List<String> params) {
-        EtlResult etlResult = new EtlResult();
-        AtomicLong successCount = new AtomicLong();
-        List<String> errMsg = new ArrayList<>();
-        String hbaseTable = "";
-        try {
-            if (config == null) {
-                logger.error("Config is null!");
-                etlResult.setSucceeded(false);
-                etlResult.setErrorMessage("Config is null!");
-                return etlResult;
-            }
-            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
-            hbaseTable = hbaseMapping.getHbaseTable();
-
-            long start = System.currentTimeMillis();
-
-            if (params != null && params.size() == 1 && "rebuild".equalsIgnoreCase(params.get(0))) {
-                logger.info(hbaseMapping.getHbaseTable() + " rebuild is starting!");
-                // Drop the table first if it already exists
-                if (hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
-                    hbaseTemplate.disableTable(hbaseMapping.getHbaseTable());
-                    hbaseTemplate.deleteTable(hbaseMapping.getHbaseTable());
-                }
-                params = null;
-            } else {
-                logger.info(hbaseMapping.getHbaseTable() + " etl is starting!");
-            }
-            createTable(hbaseTemplate, config);
-
-            // Build the SQL
-            String sql = "SELECT * FROM " + config.getHbaseMapping().getDatabase() + "." + hbaseMapping.getTable();
-
-            // Append the filter condition
-            if (params != null && params.size() == 1 && hbaseMapping.getEtlCondition() == null) {
-                AtomicBoolean stExists = new AtomicBoolean(false);
-                // Check whether a SYS_TIME column exists
-                Util.sqlRS(ds, sql, rs -> {
-                    try {
-                        ResultSetMetaData rsmd = rs.getMetaData();
-                        int cnt = rsmd.getColumnCount();
-                        for (int i = 1; i <= cnt; i++) {
-                            String columnName = rsmd.getColumnName(i);
-                            if ("SYS_TIME".equalsIgnoreCase(columnName)) {
-                                stExists.set(true);
-                                break;
-                            }
-                        }
-                    } catch (Exception e) {
-                        // ignore
-                    }
-                    return null;
-                });
-                if (stExists.get()) {
-                    sql += " WHERE SYS_TIME >= '" + params.get(0) + "' ";
-                }
-            } else if (hbaseMapping.getEtlCondition() != null && params != null) {
-                String etlCondition = hbaseMapping.getEtlCondition();
-                int size = params.size();
-                for (int i = 0; i < size; i++) {
-                    etlCondition = etlCondition.replace("{" + i + "}", params.get(i));
-                }
-
-                sql += " " + etlCondition;
-            }
-
-            // Get the total row count
-            String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
-            long cnt = (Long) Util.sqlRS(ds, countSql, rs -> {
-                Long count = null;
-                try {
-                    if (rs.next()) {
-                        count = ((Number) rs.getObject(1)).longValue();
-                    }
-                } catch (Exception e) {
-                    logger.error(e.getMessage(), e);
-                }
-                return count == null ? 0 : count;
-            });
-
-            // Use multiple threads when there are at least 10,000 rows
-            if (cnt >= 10000) {
-                int threadCount = 3;
-                long perThreadCnt = cnt / threadCount;
-                ExecutorService executor = Executors.newFixedThreadPool(threadCount);
-                List<Future<Boolean>> futures = new ArrayList<>(threadCount);
-                for (int i = 0; i < threadCount; i++) {
-                    long offset = i * perThreadCnt;
-                    Long size = null;
-                    if (i != threadCount - 1) {
-                        size = perThreadCnt;
-                    }
-                    String sqlFinal;
-                    if (size != null) {
-                        sqlFinal = sql + " LIMIT " + offset + "," + size;
-                    } else {
-                        sqlFinal = sql + " LIMIT " + offset + "," + cnt;
-                    }
-                    Future<Boolean> future = executor.submit(
-                        () -> executeSqlImport(ds, sqlFinal, hbaseMapping, hbaseTemplate, successCount, errMsg));
-                    futures.add(future);
-                }
-
-                for (Future<Boolean> future : futures) {
-                    future.get();
-                }
-
-                executor.shutdown();
-            } else {
-                executeSqlImport(ds, sql, hbaseMapping, hbaseTemplate, successCount, errMsg);
-            }
-
-            logger.info(hbaseMapping.getHbaseTable() + " etl completed in: "
-                        + (System.currentTimeMillis() - start) / 1000 + "s!");
-
-            etlResult.setResultMessage("导入HBase表 " + hbaseMapping.getHbaseTable() + " 数据:" + successCount.get() + " 条");
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            errMsg.add(hbaseTable + " etl failed! ==>" + e.getMessage());
-        }
-
-        if (errMsg.isEmpty()) {
-            etlResult.setSucceeded(true);
-        } else {
-            etlResult.setErrorMessage(Joiner.on("\n").join(errMsg));
-        }
-        return etlResult;
-    }
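As a worked example of the split above: for cnt = 25,000 and threadCount = 3, perThreadCnt = 8,333, so the three statements end with LIMIT 0,8333, LIMIT 8333,8333 and LIMIT 16666,25000; the last thread's size falls back to cnt, which simply reads to the end of the result set.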
-
-    /**
-     * Execute the import.
-     *
-     * @param ds data source
-     * @param sql query SQL
-     * @param hbaseMapping HBase mapping config
-     * @param hbaseTemplate HBase template
-     * @param successCount counter of successfully imported rows
-     * @param errMsg error message collector
-     * @return true if the import succeeded
-     */
-    private static boolean executeSqlImport(DataSource ds, String sql, MappingConfig.HbaseMapping hbaseMapping,
-                                            HbaseTemplate hbaseTemplate, AtomicLong successCount, List<String> errMsg) {
-        try {
-            Util.sqlRS(ds, sql, rs -> {
-                int i = 1;
-
-                try {
-                    boolean complete = false;
-                    List<HRow> rows = new ArrayList<>();
-                    String[] rowKeyColumns = null;
-                    if (hbaseMapping.getRowKey() != null) {
-                        rowKeyColumns = hbaseMapping.getRowKey().trim().split(",");
-                    }
-                    while (rs.next()) {
-                        int cc = rs.getMetaData().getColumnCount();
-                        int[] jdbcTypes = new int[cc];
-                        Class<?>[] classes = new Class[cc];
-                        for (int j = 1; j <= cc; j++) {
-                            int jdbcType = rs.getMetaData().getColumnType(j);
-                            jdbcTypes[j - 1] = jdbcType;
-                            classes[j - 1] = JdbcTypeUtil.jdbcType2javaType(jdbcType);
-                        }
-                        HRow row = new HRow();
-
-                        if (rowKeyColumns != null) {
-                            // Concatenate the rowKey columns
-                            StringBuilder rowKeyVale = new StringBuilder();
-                            for (String rowKeyColumnName : rowKeyColumns) {
-                                Object obj = rs.getObject(rowKeyColumnName);
-                                if (obj != null) {
-                                    rowKeyVale.append(obj.toString());
-                                }
-                                rowKeyVale.append("|");
-                            }
-                            int len = rowKeyVale.length();
-                            if (len > 0) {
-                                rowKeyVale.delete(len - 1, len);
-                            }
-                            row.setRowKey(Bytes.toBytes(rowKeyVale.toString()));
-                        }
-
-                        for (int j = 1; j <= cc; j++) {
-                            String columnName = rs.getMetaData().getColumnName(j);
-
-                            Object val = JdbcTypeUtil.getRSData(rs, columnName, jdbcTypes[j - 1]);
-                            if (val == null) {
-                                continue;
-                            }
-
-                            MappingConfig.ColumnItem columnItem = hbaseMapping.getColumnItems().get(columnName);
-                            // No column mapping configured
-                            if (columnItem == null) {
-                                String family = hbaseMapping.getFamily();
-                                String qualifier = columnName;
-                                if (hbaseMapping.isUppercaseQualifier()) {
-                                    qualifier = qualifier.toUpperCase();
-                                }
-                                if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
-                                    if (hbaseMapping.getRowKey() == null && j == 1) {
-                                        row.setRowKey(Bytes.toBytes(val.toString()));
-                                    } else {
-                                        row.addCell(family, qualifier, Bytes.toBytes(val.toString()));
-                                    }
-                                } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
-                                    Type type = Type.getType(classes[j - 1]);
-                                    if (hbaseMapping.getRowKey() == null && j == 1) {
-                                        row.setRowKey(TypeUtil.toBytes(val, type));
-                                    } else {
-                                        row.addCell(family, qualifier, TypeUtil.toBytes(val, type));
-                                    }
-                                } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
-                                    PhType phType = PhType.getType(classes[j - 1]);
-                                    if (hbaseMapping.getRowKey() == null && j == 1) {
-                                        row.setRowKey(PhTypeUtil.toBytes(val, phType));
-                                    } else {
-                                        row.addCell(family, qualifier, PhTypeUtil.toBytes(val, phType));
-                                    }
-                                }
-                            } else {
-                                // No type conversion needed
-                                if (columnItem.getType() == null || "".equals(columnItem.getType())) {
-                                    if (val instanceof java.sql.Date) {
-                                        SimpleDateFormat dateFmt = new SimpleDateFormat("yyyy-MM-dd");
-                                        val = dateFmt.format((Date) val);
-                                    } else if (val instanceof Timestamp) {
-                                        SimpleDateFormat datetimeFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-                                        val = datetimeFmt.format((Date) val);
-                                    }
-
-                                    byte[] valBytes = Bytes.toBytes(val.toString());
-                                    if (columnItem.isRowKey()) {
-                                        if (columnItem.getRowKeyLen() != null) {
-                                            valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
-                                            row.setRowKey(valBytes);
-                                        } else {
-                                            row.setRowKey(valBytes);
-                                        }
-                                    } else {
-                                        row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
-                                    }
-                                } else {
-                                    if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
-                                        byte[] valBytes = Bytes.toBytes(val.toString());
-                                        if (columnItem.isRowKey()) {
-                                            if (columnItem.getRowKeyLen() != null) {
-                                                valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
-                                            }
-                                            row.setRowKey(valBytes);
-                                        } else {
-                                            row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
-                                        }
-                                    } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
-                                        Type type = Type.getType(columnItem.getType());
-                                        if (columnItem.isRowKey()) {
-                                            if (columnItem.getRowKeyLen() != null) {
-                                                String v = limitLenNum(columnItem.getRowKeyLen(), val);
-                                                row.setRowKey(Bytes.toBytes(v));
-                                            } else {
-                                                row.setRowKey(TypeUtil.toBytes(val, type));
-                                            }
-                                        } else {
-                                            row.addCell(columnItem.getFamily(),
-                                                columnItem.getQualifier(),
-                                                TypeUtil.toBytes(val, type));
-                                        }
-                                    } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
-                                        PhType phType = PhType.getType(columnItem.getType());
-                                        if (columnItem.isRowKey()) {
-                                            row.setRowKey(PhTypeUtil.toBytes(val, phType));
-                                        } else {
-                                            row.addCell(columnItem.getFamily(),
-                                                columnItem.getQualifier(),
-                                                PhTypeUtil.toBytes(val, phType));
-                                        }
-                                    }
-                                }
-                            }
-                        }
-
-                        if (row.getRowKey() == null) throw new RuntimeException("RowKey value is empty");
-
-                        rows.add(row);
-                        complete = false;
-                        if (i % hbaseMapping.getCommitBatch() == 0 && !rows.isEmpty()) {
-                            hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
-                            rows.clear();
-                            complete = true;
-                        }
-                        i++;
-                        successCount.incrementAndGet();
-                        if (logger.isDebugEnabled()) {
-                            logger.debug("successful import count:" + successCount.get());
-                        }
-                    }
-
-                    if (!complete && !rows.isEmpty()) {
-                        hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
-                    }
-
-                } catch (Exception e) {
-                    logger.error(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage(), e);
-                    errMsg.add(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage());
-                    // throw new RuntimeException(e);
-                }
-                return i;
-            });
-            return true;
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            return false;
-        }
-    }
-
-    private static String limitLenNum(int len, Object val) {
-        if (val == null) {
-            return null;
-        }
-        if (val instanceof Number) {
-            return String.format("%0" + len + "d", (Number) ((Number) val).longValue());
-        } else if (val instanceof String) {
-            return String.format("%0" + len + "d", Long.parseLong((String) val));
-        }
-        return null;
-    }
-}
+package com.alibaba.otter.canal.client.adapter.hbase.service;
+
+import java.sql.ResultSetMetaData;
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+
+import javax.sql.DataSource;
+
+import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.hbase.support.HRow;
+import com.alibaba.otter.canal.client.adapter.hbase.support.HbaseTemplate;
+import com.alibaba.otter.canal.client.adapter.hbase.support.PhType;
+import com.alibaba.otter.canal.client.adapter.hbase.support.PhTypeUtil;
+import com.alibaba.otter.canal.client.adapter.hbase.support.Type;
+import com.alibaba.otter.canal.client.adapter.hbase.support.TypeUtil;
+import com.alibaba.otter.canal.client.adapter.support.EtlResult;
+import com.alibaba.otter.canal.client.adapter.support.JdbcTypeUtil;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+import com.google.common.base.Joiner;
+
+/**
+ * HBase ETL operation service class
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+public class HbaseEtlService {
+
+    private static Logger logger = LoggerFactory.getLogger(HbaseEtlService.class);
+
+    /**
+     * Create the HBase table if it does not already exist
+     *
+     * @param hbaseTemplate HBase operation template
+     * @param config mapping config
+     */
+    public static void createTable(HbaseTemplate hbaseTemplate, MappingConfig config) {
+        try {
+            // Check whether the HBase table exists; create it if not
+            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
+            if (!hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
+                hbaseTemplate.createTable(hbaseMapping.getHbaseTable(), hbaseMapping.getFamily());
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Import data
+     *
+     * @param ds data source
+     * @param hbaseTemplate HBase operation template
+     * @param config mapping config
+     * @param params filter conditions
+     * @return import result
+     */
+    public static EtlResult importData(DataSource ds, HbaseTemplate hbaseTemplate, MappingConfig config,
+                                       List<String> params) {
+        EtlResult etlResult = new EtlResult();
+        AtomicLong successCount = new AtomicLong();
+        List<String> errMsg = new ArrayList<>();
+        String hbaseTable = "";
+        try {
+            if (config == null) {
+                logger.error("Config is null!");
+                etlResult.setSucceeded(false);
+                etlResult.setErrorMessage("Config is null!");
+                return etlResult;
+            }
+            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
+            hbaseTable = hbaseMapping.getHbaseTable();
+
+            long start = System.currentTimeMillis();
+
+            if (params != null && params.size() == 1 && "rebuild".equalsIgnoreCase(params.get(0))) {
+                logger.info(hbaseMapping.getHbaseTable() + " rebuild is starting!");
+                // Drop the table if it already exists
+                if (hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
+                    hbaseTemplate.disableTable(hbaseMapping.getHbaseTable());
+                    hbaseTemplate.deleteTable(hbaseMapping.getHbaseTable());
+                }
+                params = null;
+            } else {
+                logger.info(hbaseMapping.getHbaseTable() + " etl is starting!");
+            }
+            createTable(hbaseTemplate, config);
+
+            // Build the base SQL
+            String sql = "SELECT * FROM " + config.getHbaseMapping().getDatabase() + "." + hbaseMapping.getTable();
+
+            // Append the filter condition
+            if (params != null && params.size() == 1 && hbaseMapping.getEtlCondition() == null) {
+                AtomicBoolean stExists = new AtomicBoolean(false);
+                // Check whether a SYS_TIME column exists
+                Util.sqlRS(ds, sql, rs -> {
+                    try {
+                        ResultSetMetaData rsmd = rs.getMetaData();
+                        int cnt = rsmd.getColumnCount();
+                        for (int i = 1; i <= cnt; i++) {
+                            String columnName = rsmd.getColumnName(i);
+                            if ("SYS_TIME".equalsIgnoreCase(columnName)) {
+                                stExists.set(true);
+                                break;
+                            }
+                        }
+                    } catch (Exception e) {
+                        // ignore
+                    }
+                    return null;
+                });
+                if (stExists.get()) {
+                    sql += " WHERE SYS_TIME >= '" + params.get(0) + "' ";
+                }
+            } else if (hbaseMapping.getEtlCondition() != null && params != null) {
+                String etlCondition = hbaseMapping.getEtlCondition();
+                int size = params.size();
+                for (int i = 0; i < size; i++) {
+                    etlCondition = etlCondition.replace("{" + i + "}", params.get(i));
+                }
+
+                sql += " " + etlCondition;
+            }
+
+            // Get the total row count
+            String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
+            long cnt = (Long) Util.sqlRS(ds, countSql, rs -> {
+                Long count = null;
+                try {
+                    if (rs.next()) {
+                        count = ((Number) rs.getObject(1)).longValue();
+                    }
+                } catch (Exception e) {
+                    logger.error(e.getMessage(), e);
+                }
+                return count == null ? 0 : count;
+            });
+
+            // Use multiple threads when there are at least 10,000 records
+            if (cnt >= 10000) {
+                int threadCount = 3;
+                long perThreadCnt = cnt / threadCount;
+                ExecutorService executor = Executors.newFixedThreadPool(threadCount);
+                List<Future<Boolean>> futures = new ArrayList<>(threadCount);
+                for (int i = 0; i < threadCount; i++) {
+                    long offset = i * perThreadCnt;
+                    Long size = null;
+                    if (i != threadCount - 1) {
+                        size = perThreadCnt;
+                    }
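+                    // size stays null for the last thread, so the LIMIT below uses cnt to cover the remainder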
+                    String sqlFinal;
+                    if (size != null) {
+                        sqlFinal = sql + " LIMIT " + offset + "," + size;
+                    } else {
+                        sqlFinal = sql + " LIMIT " + offset + "," + cnt;
+                    }
+                    Future<Boolean> future = executor.submit(
+                        () -> executeSqlImport(ds, sqlFinal, hbaseMapping, hbaseTemplate, successCount, errMsg));
+                    futures.add(future);
+                }
+
+                for (Future<Boolean> future : futures) {
+                    future.get();
+                }
+
+                executor.shutdown();
+            } else {
+                executeSqlImport(ds, sql, hbaseMapping, hbaseTemplate, successCount, errMsg);
+            }
+
+            logger.info(hbaseMapping.getHbaseTable() + " etl completed in: "
+                        + (System.currentTimeMillis() - start) / 1000 + "s!");
+
+            etlResult.setResultMessage("导入HBase表 " + hbaseMapping.getHbaseTable() + " 数据:" + successCount.get() + " 条");
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            errMsg.add(hbaseTable + " etl failed! ==>" + e.getMessage());
+        }
+
+        if (errMsg.isEmpty()) {
+            etlResult.setSucceeded(true);
+        } else {
+            etlResult.setErrorMessage(Joiner.on("\n").join(errMsg));
+        }
+        return etlResult;
+    }
+
+    /**
+     * Execute the SQL import
+     *
+     * @param ds data source
+     * @param sql query SQL
+     * @param hbaseMapping HBase mapping config
+     * @param hbaseTemplate HBase operation template
+     * @param successCount success counter
+     * @param errMsg error message collector
+     * @return true if the import completed without exception
+     */
+    private static boolean executeSqlImport(DataSource ds, String sql, MappingConfig.HbaseMapping hbaseMapping,
+                                            HbaseTemplate hbaseTemplate, AtomicLong successCount, List<String> errMsg) {
+        try {
+            Util.sqlRS(ds, sql, rs -> {
+                int i = 1;
+
+                try {
+                    boolean complete = false;
+                    List<HRow> rows = new ArrayList<>();
+                    String[] rowKeyColumns = null;
+                    if (hbaseMapping.getRowKey() != null) {
+                        rowKeyColumns = hbaseMapping.getRowKey().trim().split(",");
+                    }
+                    while (rs.next()) {
+                        int cc = rs.getMetaData().getColumnCount();
+                        int[] jdbcTypes = new int[cc];
+                        Class<?>[] classes = new Class[cc];
+                        for (int j = 1; j <= cc; j++) {
+                            int jdbcType = rs.getMetaData().getColumnType(j);
+                            jdbcTypes[j - 1] = jdbcType;
+                            classes[j - 1] = JdbcTypeUtil.jdbcType2javaType(jdbcType);
+                        }
+                        HRow row = new HRow();
+
+                        if (rowKeyColumns != null) {
+                            // Concatenate the configured rowKey columns, separated by "|"
+                            StringBuilder rowKeyVale = new StringBuilder();
+                            for (String rowKeyColumnName : rowKeyColumns) {
+                                Object obj = rs.getObject(rowKeyColumnName);
+                                if (obj != null) {
+                                    rowKeyVale.append(obj.toString());
+                                }
+                                rowKeyVale.append("|");
+                            }
+                            int len = rowKeyVale.length();
+                            if (len > 0) {
+                                rowKeyVale.delete(len - 1, len);
+                            }
+                            row.setRowKey(Bytes.toBytes(rowKeyVale.toString()));
+                        }
+
+                        for (int j = 1; j <= cc; j++) {
+                            String columnName = rs.getMetaData().getColumnName(j);
+
+                            Object val = JdbcTypeUtil.getRSData(rs, columnName, jdbcTypes[j - 1]);
+                            if (val == null) {
+                                continue;
+                            }
+
+                            MappingConfig.ColumnItem columnItem = hbaseMapping.getColumnItems().get(columnName);
+                            // No column mapping configured for this column
+                            if (columnItem == null) {
+                                String family = hbaseMapping.getFamily();
+                                String qualifier = columnName;
+                                if (hbaseMapping.isUppercaseQualifier()) {
+                                    qualifier = qualifier.toUpperCase();
+                                }
+                                if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
+                                    if (hbaseMapping.getRowKey() == null && j == 1) {
+                                        row.setRowKey(Bytes.toBytes(val.toString()));
+                                    } else {
+                                        row.addCell(family, qualifier, Bytes.toBytes(val.toString()));
+                                    }
+                                } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
+                                    Type type = Type.getType(classes[j - 1]);
+                                    if (hbaseMapping.getRowKey() == null && j == 1) {
+                                        row.setRowKey(TypeUtil.toBytes(val, type));
+                                    } else {
+                                        row.addCell(family, qualifier, TypeUtil.toBytes(val, type));
+                                    }
+                                } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
+                                    PhType phType = PhType.getType(classes[j - 1]);
+                                    if (hbaseMapping.getRowKey() == null && j == 1) {
+                                        row.setRowKey(PhTypeUtil.toBytes(val, phType));
+                                    } else {
+                                        row.addCell(family, qualifier, PhTypeUtil.toBytes(val, phType));
+                                    }
+                                }
+                            } else {
+                                // No type conversion needed
+                                if (columnItem.getType() == null || "".equals(columnItem.getType())) {
+                                    if (val instanceof java.sql.Date) {
+                                        SimpleDateFormat dateFmt = new SimpleDateFormat("yyyy-MM-dd");
+                                        val = dateFmt.format((Date) val);
+                                    } else if (val instanceof Timestamp) {
+                                        SimpleDateFormat datetimeFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+                                        val = datetimeFmt.format((Date) val);
+                                    }
+
+                                    byte[] valBytes = Bytes.toBytes(val.toString());
+                                    if (columnItem.isRowKey()) {
+                                        if (columnItem.getRowKeyLen() != null) {
+                                            valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
+                                        }
+                                        row.setRowKey(valBytes);
+                                    } else {
+                                        row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
+                                    }
+                                } else {
+                                    if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
+                                        byte[] valBytes = Bytes.toBytes(val.toString());
+                                        if (columnItem.isRowKey()) {
+                                            if (columnItem.getRowKeyLen() != null) {
+                                                valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
+                                            }
+                                            row.setRowKey(valBytes);
+                                        } else {
+                                            row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
+                                        }
+                                    } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
+                                        Type type = Type.getType(columnItem.getType());
+                                        if (columnItem.isRowKey()) {
+                                            if (columnItem.getRowKeyLen() != null) {
+                                                String v = limitLenNum(columnItem.getRowKeyLen(), val);
+                                                row.setRowKey(Bytes.toBytes(v));
+                                            } else {
+                                                row.setRowKey(TypeUtil.toBytes(val, type));
+                                            }
+                                        } else {
+                                            row.addCell(columnItem.getFamily(),
+                                                columnItem.getQualifier(),
+                                                TypeUtil.toBytes(val, type));
+                                        }
+                                    } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
+                                        PhType phType = PhType.getType(columnItem.getType());
+                                        if (columnItem.isRowKey()) {
+                                            row.setRowKey(PhTypeUtil.toBytes(val, phType));
+                                        } else {
+                                            row.addCell(columnItem.getFamily(),
+                                                columnItem.getQualifier(),
+                                                PhTypeUtil.toBytes(val, phType));
+                                        }
+                                    }
+                                }
+                            }
+                        }
+
+                        if (row.getRowKey() == null) throw new RuntimeException("RowKey value is empty");
+
+                        rows.add(row);
+                        complete = false;
+                        if (i % hbaseMapping.getCommitBatch() == 0 && !rows.isEmpty()) {
+                            hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
+                            rows.clear();
+                            complete = true;
+                        }
+                        i++;
+                        successCount.incrementAndGet();
+                        if (logger.isDebugEnabled()) {
+                            logger.debug("successful import count:" + successCount.get());
+                        }
+                    }
+
+                    if (!complete && !rows.isEmpty()) {
+                        hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
+                    }
+
+                } catch (Exception e) {
+                    logger.error(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage(), e);
+                    errMsg.add(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage());
+                    // throw new RuntimeException(e);
+                }
+                return i;
+            });
+            return true;
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            return false;
+        }
+    }
+
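+    /**
+     * Zero-pads a numeric value to a fixed length so that the byte-wise
+     * (lexicographic) order of the resulting rowkeys matches numeric order.
+     */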
+    private static String limitLenNum(int len, Object val) {
+        if (val == null) {
+            return null;
+        }
+        if (val instanceof Number) {
+            return String.format("%0" + len + "d", (Number) ((Number) val).longValue());
+        } else if (val instanceof String) {
+            return String.format("%0" + len + "d", Long.parseLong((String) val));
+        }
+        return null;
+    }
+}
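
A standalone sketch, not part of the patch, of why fixed-length zero-padding matters for the rowkeys built above: HBase sorts rowkeys as raw bytes, so padding keeps lexicographic order aligned with numeric order. The class name below is illustrative only.

    public class RowKeyPadDemo {

        // Same padding logic as limitLenNum above
        static String limitLenNum(int len, Object val) {
            if (val == null) return null;
            if (val instanceof Number) {
                return String.format("%0" + len + "d", ((Number) val).longValue());
            } else if (val instanceof String) {
                return String.format("%0" + len + "d", Long.parseLong((String) val));
            }
            return null;
        }

        public static void main(String[] args) {
            System.out.println(limitLenNum(10, 42));     // 0000000042
            System.out.println(limitLenNum(10, "4200")); // 0000004200
            // "0000000042" < "0000004200" byte-wise, matching 42 < 4200 numerically;
            // without padding, "42" would sort after "4200"
        }
    }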

+ 216 - 216
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterLoader.java

@@ -1,216 +1,216 @@
-package com.alibaba.otter.canal.adapter.launcher.loader;
-
-import java.net.InetSocketAddress;
-import java.net.SocketAddress;
-import java.util.*;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.core.env.EnumerablePropertySource;
-import org.springframework.core.env.Environment;
-import org.springframework.core.env.PropertySource;
-import org.springframework.core.env.StandardEnvironment;
-
-import com.alibaba.otter.canal.adapter.launcher.config.SpringContext;
-import com.alibaba.otter.canal.client.adapter.OuterAdapter;
-import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
-import com.alibaba.otter.canal.client.adapter.support.ExtensionLoader;
-import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig;
-
-/**
- * Loader for outer adapters
- *
- * @version 1.0.0
- */
-public class CanalAdapterLoader {
-
-    private static final Logger                     logger        = LoggerFactory.getLogger(CanalAdapterLoader.class);
-
-    private CanalClientConfig                       canalClientConfig;
-
-    private Map<String, CanalAdapterWorker>         canalWorkers  = new HashMap<>();
-
-    private Map<String, AbstractCanalAdapterWorker> canalMQWorker = new HashMap<>();
-
-    private ExtensionLoader<OuterAdapter>           loader;
-
-    public CanalAdapterLoader(CanalClientConfig canalClientConfig){
-        this.canalClientConfig = canalClientConfig;
-    }
-
-    /**
-     * Initialize the canal client workers
-     */
-    public void init() {
-        loader = ExtensionLoader.getExtensionLoader(OuterAdapter.class);
-
-        String canalServerHost = this.canalClientConfig.getCanalServerHost();
-        SocketAddress sa = null;
-        if (canalServerHost != null) {
-            String[] ipPort = canalServerHost.split(":");
-            sa = new InetSocketAddress(ipPort[0], Integer.parseInt(ipPort[1]));
-        }
-        String zkHosts = this.canalClientConfig.getZookeeperHosts();
-
-        if ("tcp".equalsIgnoreCase(canalClientConfig.getMode())) {
-            // Initialize adapters for the TCP canal client
-            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
-                List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
-
-                for (CanalClientConfig.Group connectorGroup : canalAdapter.getGroups()) {
-                    List<OuterAdapter> canalOutConnectors = new ArrayList<>();
-                    for (OuterAdapterConfig c : connectorGroup.getOuterAdapters()) {
-                        loadAdapter(c, canalOutConnectors);
-                    }
-                    canalOuterAdapterGroups.add(canalOutConnectors);
-                }
-                CanalAdapterWorker worker;
-                if (sa != null) {
-                    worker = new CanalAdapterWorker(canalClientConfig,
-                        canalAdapter.getInstance(),
-                        sa,
-                        canalOuterAdapterGroups);
-                } else if (zkHosts != null) {
-                    worker = new CanalAdapterWorker(canalClientConfig,
-                        canalAdapter.getInstance(),
-                        zkHosts,
-                        canalOuterAdapterGroups);
-                } else {
-                    throw new RuntimeException("No canal server connector found");
-                }
-                canalWorkers.put(canalAdapter.getInstance(), worker);
-                worker.start();
-                logger.info("Start adapter for canal instance: {} succeed", canalAdapter.getInstance());
-            }
-        } else if ("kafka".equalsIgnoreCase(canalClientConfig.getMode())) {
-            // Initialize adapters for the kafka canal client
-            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
-                for (CanalClientConfig.Group group : canalAdapter.getGroups()) {
-                    List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
-                    List<OuterAdapter> canalOuterAdapters = new ArrayList<>();
-                    for (OuterAdapterConfig config : group.getOuterAdapters()) {
-                        loadAdapter(config, canalOuterAdapters);
-                    }
-                    canalOuterAdapterGroups.add(canalOuterAdapters);
-
-                    CanalAdapterKafkaWorker canalKafkaWorker = new CanalAdapterKafkaWorker(canalClientConfig,
-                        canalClientConfig.getMqServers(),
-                        canalAdapter.getInstance(),
-                        group.getGroupId(),
-                        canalOuterAdapterGroups,
-                        canalClientConfig.getFlatMessage());
-                    canalMQWorker.put(canalAdapter.getInstance() + "-kafka-" + group.getGroupId(), canalKafkaWorker);
-                    canalKafkaWorker.start();
-                    logger.info("Start adapter for canal-client mq topic: {} succeed",
-                        canalAdapter.getInstance() + "-" + group.getGroupId());
-                }
-            }
-        } else if ("rocketMQ".equalsIgnoreCase(canalClientConfig.getMode())) {
-            // Initialize adapters for the rocketMQ canal client
-            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
-                for (CanalClientConfig.Group group : canalAdapter.getGroups()) {
-                    List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
-                    List<OuterAdapter> canalOuterAdapters = new ArrayList<>();
-                    for (OuterAdapterConfig config : group.getOuterAdapters()) {
-                        loadAdapter(config, canalOuterAdapters);
-                    }
-                    canalOuterAdapterGroups.add(canalOuterAdapters);
-                    CanalAdapterRocketMQWorker rocketMQWorker = new CanalAdapterRocketMQWorker(canalClientConfig,
-                        canalClientConfig.getMqServers(),
-                        canalAdapter.getInstance(),
-                        group.getGroupId(),
-                        canalOuterAdapterGroups,
-                        canalClientConfig.getAccessKey(),
-                        canalClientConfig.getSecretKey(),
-                        canalClientConfig.getFlatMessage());
-                    canalMQWorker.put(canalAdapter.getInstance() + "-rocketmq-" + group.getGroupId(), rocketMQWorker);
-                    rocketMQWorker.start();
-
-                    logger.info("Start adapter for canal-client mq topic: {} succeed",
-                        canalAdapter.getInstance() + "-" + group.getGroupId());
-                }
-            }
-        }
-    }
-
-    private void loadAdapter(OuterAdapterConfig config, List<OuterAdapter> canalOutConnectors) {
-        try {
-            OuterAdapter adapter;
-            adapter = loader.getExtension(config.getName(), StringUtils.trimToEmpty(config.getKey()));
-
-            ClassLoader cl = Thread.currentThread().getContextClassLoader();
-            // Swap in the adapter's own ClassLoader
-            Thread.currentThread().setContextClassLoader(adapter.getClass().getClassLoader());
-            Environment env = (Environment) SpringContext.getBean(Environment.class);
-            Properties evnProperties = null;
-            if (env instanceof StandardEnvironment) {
-                evnProperties = new Properties();
-                for (PropertySource<?> propertySource : ((StandardEnvironment) env).getPropertySources()) {
-                    if (propertySource instanceof EnumerablePropertySource) {
-                        String[] names = ((EnumerablePropertySource) propertySource).getPropertyNames();
-                        for (String name : names) {
-                            Object val = propertySource.getProperty(name);
-                            if (val != null) {
-                                evnProperties.put(name, val);
-                            }
-                        }
-                    }
-                }
-            }
-            adapter.init(config, evnProperties);
-            Thread.currentThread().setContextClassLoader(cl);
-            canalOutConnectors.add(adapter);
-            logger.info("Load canal adapter: {} succeed", config.getName());
-        } catch (Exception e) {
-            logger.error("Load canal adapter: {} failed", config.getName(), e);
-        }
-    }
-
-    /**
-     * Destroy all adapters, in parallel so that many canal instances do not block shutdown
-     */
-    public void destroy() {
-        if (!canalWorkers.isEmpty()) {
-            ExecutorService stopExecutorService = Executors.newFixedThreadPool(canalWorkers.size());
-            List<Future<Boolean>> futures = new ArrayList<>();
-            for (CanalAdapterWorker canalAdapterWorker : canalWorkers.values()) {
-                futures.add(stopExecutorService.submit(() -> {
-                    canalAdapterWorker.stop();
-                    return true;
-                }));
-            }
-            futures.forEach(future -> {
-                try {
-                    future.get();
-                } catch (Exception e) {
-                    // ignore
-                }
-            });
-            stopExecutorService.shutdown();
-        }
-
-        if (!canalMQWorker.isEmpty()) {
-            ExecutorService stopMQWorkerService = Executors.newFixedThreadPool(canalMQWorker.size());
-            List<Future<Boolean>> futures = new ArrayList<>();
-            for (AbstractCanalAdapterWorker canalAdapterMQWorker : canalMQWorker.values()) {
-                futures.add(stopMQWorkerService.submit(() -> {
-                    canalAdapterMQWorker.stop();
-                    return true;
-                }));
-            }
-            futures.forEach(future -> {
-                try {
-                    future.get();
-                } catch (Exception e) {
-                    // ignore
-                }
-            });
-            stopMQWorkerService.shutdown();
-        }
-        logger.info("All canal adapters destroyed");
-    }
-}
+package com.alibaba.otter.canal.adapter.launcher.loader;
+
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.util.*;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.core.env.EnumerablePropertySource;
+import org.springframework.core.env.Environment;
+import org.springframework.core.env.PropertySource;
+import org.springframework.core.env.StandardEnvironment;
+
+import com.alibaba.otter.canal.adapter.launcher.config.SpringContext;
+import com.alibaba.otter.canal.client.adapter.OuterAdapter;
+import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
+import com.alibaba.otter.canal.client.adapter.support.ExtensionLoader;
+import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig;
+
+/**
+ * Loader for outer adapters
+ *
+ * @version 1.0.0
+ */
+public class CanalAdapterLoader {
+
+    private static final Logger                     logger        = LoggerFactory.getLogger(CanalAdapterLoader.class);
+
+    private CanalClientConfig                       canalClientConfig;
+
+    private Map<String, CanalAdapterWorker>         canalWorkers  = new HashMap<>();
+
+    private Map<String, AbstractCanalAdapterWorker> canalMQWorker = new HashMap<>();
+
+    private ExtensionLoader<OuterAdapter>           loader;
+
+    public CanalAdapterLoader(CanalClientConfig canalClientConfig){
+        this.canalClientConfig = canalClientConfig;
+    }
+
+    /**
+     * Initialize the canal client workers
+     */
+    public void init() {
+        loader = ExtensionLoader.getExtensionLoader(OuterAdapter.class);
+
+        String canalServerHost = this.canalClientConfig.getCanalServerHost();
+        SocketAddress sa = null;
+        if (canalServerHost != null) {
+            String[] ipPort = canalServerHost.split(":");
+            sa = new InetSocketAddress(ipPort[0], Integer.parseInt(ipPort[1]));
+        }
+        String zkHosts = this.canalClientConfig.getZookeeperHosts();
+
+        if ("tcp".equalsIgnoreCase(canalClientConfig.getMode())) {
+            // Initialize adapters for the TCP canal client
+            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
+                List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
+
+                for (CanalClientConfig.Group connectorGroup : canalAdapter.getGroups()) {
+                    List<OuterAdapter> canalOutConnectors = new ArrayList<>();
+                    for (OuterAdapterConfig c : connectorGroup.getOuterAdapters()) {
+                        loadAdapter(c, canalOutConnectors);
+                    }
+                    canalOuterAdapterGroups.add(canalOutConnectors);
+                }
+                CanalAdapterWorker worker;
+                if (sa != null) {
+                    worker = new CanalAdapterWorker(canalClientConfig,
+                        canalAdapter.getInstance(),
+                        sa,
+                        canalOuterAdapterGroups);
+                } else if (zkHosts != null) {
+                    worker = new CanalAdapterWorker(canalClientConfig,
+                        canalAdapter.getInstance(),
+                        zkHosts,
+                        canalOuterAdapterGroups);
+                } else {
+                    throw new RuntimeException("No canal server connector found");
+                }
+                canalWorkers.put(canalAdapter.getInstance(), worker);
+                worker.start();
+                logger.info("Start adapter for canal instance: {} succeed", canalAdapter.getInstance());
+            }
+        } else if ("kafka".equalsIgnoreCase(canalClientConfig.getMode())) {
+            // Initialize adapters for the kafka canal client
+            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
+                for (CanalClientConfig.Group group : canalAdapter.getGroups()) {
+                    List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
+                    List<OuterAdapter> canalOuterAdapters = new ArrayList<>();
+                    for (OuterAdapterConfig config : group.getOuterAdapters()) {
+                        loadAdapter(config, canalOuterAdapters);
+                    }
+                    canalOuterAdapterGroups.add(canalOuterAdapters);
+
+                    CanalAdapterKafkaWorker canalKafkaWorker = new CanalAdapterKafkaWorker(canalClientConfig,
+                        canalClientConfig.getMqServers(),
+                        canalAdapter.getInstance(),
+                        group.getGroupId(),
+                        canalOuterAdapterGroups,
+                        canalClientConfig.getFlatMessage());
+                    canalMQWorker.put(canalAdapter.getInstance() + "-kafka-" + group.getGroupId(), canalKafkaWorker);
+                    canalKafkaWorker.start();
+                    logger.info("Start adapter for canal-client mq topic: {} succeed",
+                        canalAdapter.getInstance() + "-" + group.getGroupId());
+                }
+            }
+        } else if ("rocketMQ".equalsIgnoreCase(canalClientConfig.getMode())) {
+            // Initialize adapters for the rocketMQ canal client
+            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
+                for (CanalClientConfig.Group group : canalAdapter.getGroups()) {
+                    List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
+                    List<OuterAdapter> canalOuterAdapters = new ArrayList<>();
+                    for (OuterAdapterConfig config : group.getOuterAdapters()) {
+                        loadAdapter(config, canalOuterAdapters);
+                    }
+                    canalOuterAdapterGroups.add(canalOuterAdapters);
+                    CanalAdapterRocketMQWorker rocketMQWorker = new CanalAdapterRocketMQWorker(canalClientConfig,
+                        canalClientConfig.getMqServers(),
+                        canalAdapter.getInstance(),
+                        group.getGroupId(),
+                        canalOuterAdapterGroups,
+                        canalClientConfig.getAccessKey(),
+                        canalClientConfig.getSecretKey(),
+                        canalClientConfig.getFlatMessage());
+                    canalMQWorker.put(canalAdapter.getInstance() + "-rocketmq-" + group.getGroupId(), rocketMQWorker);
+                    rocketMQWorker.start();
+
+                    logger.info("Start adapter for canal-client mq topic: {} succeed",
+                        canalAdapter.getInstance() + "-" + group.getGroupId());
+                }
+            }
+        }
+    }
+
+    private void loadAdapter(OuterAdapterConfig config, List<OuterAdapter> canalOutConnectors) {
+        try {
+            OuterAdapter adapter;
+            adapter = loader.getExtension(config.getName(), StringUtils.trimToEmpty(config.getKey()));
+
+            ClassLoader cl = Thread.currentThread().getContextClassLoader();
+            // Swap in the adapter's own ClassLoader
+            Thread.currentThread().setContextClassLoader(adapter.getClass().getClassLoader());
+            Environment env = (Environment) SpringContext.getBean(Environment.class);
+            Properties envProperties = null;
+            if (env instanceof StandardEnvironment) {
+                envProperties = new Properties();
+                for (PropertySource<?> propertySource : ((StandardEnvironment) env).getPropertySources()) {
+                    if (propertySource instanceof EnumerablePropertySource) {
+                        String[] names = ((EnumerablePropertySource<?>) propertySource).getPropertyNames();
+                        for (String name : names) {
+                            Object val = propertySource.getProperty(name);
+                            if (val != null) {
+                                envProperties.put(name, val);
+                            }
+                        }
+                    }
+                }
+            }
+            adapter.init(config, envProperties);
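+            // Restore the original context ClassLoader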
+            Thread.currentThread().setContextClassLoader(cl);
+            canalOutConnectors.add(adapter);
+            logger.info("Load canal adapter: {} succeed", config.getName());
+        } catch (Exception e) {
+            logger.error("Load canal adapter: {} failed", config.getName(), e);
+        }
+    }
+
+    /**
+     * Destroy all adapters, in parallel so that many canal instances do not block shutdown
+     */
+    public void destroy() {
+        if (!canalWorkers.isEmpty()) {
+            ExecutorService stopExecutorService = Executors.newFixedThreadPool(canalWorkers.size());
+            List<Future<Boolean>> futures = new ArrayList<>();
+            for (CanalAdapterWorker canalAdapterWorker : canalWorkers.values()) {
+                futures.add(stopExecutorService.submit(() -> {
+                    canalAdapterWorker.stop();
+                    return true;
+                }));
+            }
+            futures.forEach(future -> {
+                try {
+                    future.get();
+                } catch (Exception e) {
+                    // ignore
+                }
+            });
+            stopExecutorService.shutdown();
+        }
+
+        if (!canalMQWorker.isEmpty()) {
+            ExecutorService stopMQWorkerService = Executors.newFixedThreadPool(canalMQWorker.size());
+            List<Future<Boolean>> futures = new ArrayList<>();
+            for (AbstractCanalAdapterWorker canalAdapterMQWorker : canalMQWorker.values()) {
+                futures.add(stopMQWorkerService.submit(() -> {
+                    canalAdapterMQWorker.stop();
+                    return true;
+                }));
+            }
+            futures.forEach(future -> {
+                try {
+                    future.get();
+                } catch (Exception e) {
+                    // ignore
+                }
+            });
+            stopMQWorkerService.shutdown();
+        }
+        logger.info("All canal adapters destroyed");
+    }
+}
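
A hedged sketch of the context-ClassLoader swap idiom used in loadAdapter() above; names are illustrative. Unlike the code above, the restore is wrapped in finally so the original loader comes back even when init() throws:

    import java.util.Properties;

    public class ClassLoaderSwapDemo {

        interface Adapter {
            void init(Properties props);
        }

        static void initWithOwnClassLoader(Adapter adapter, Properties props) {
            ClassLoader original = Thread.currentThread().getContextClassLoader();
            // Switch to the adapter's own ClassLoader so it resolves its own classes and resources
            Thread.currentThread().setContextClassLoader(adapter.getClass().getClassLoader());
            try {
                adapter.init(props);
            } finally {
                // Always restore, even when init() fails
                Thread.currentThread().setContextClassLoader(original);
            }
        }

        public static void main(String[] args) {
            initWithOwnClassLoader(props -> System.out.println("adapter initialized"), new Properties());
        }
    }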

+ 246 - 248
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/remote/DbRemoteConfigLoader.java

@@ -1,248 +1,246 @@
-package com.alibaba.otter.canal.adapter.launcher.monitor.remote;
-
-import java.io.FileWriter;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.druid.pool.DruidDataSource;
-import com.alibaba.otter.canal.common.utils.CommonUtils;
-import com.alibaba.otter.canal.common.utils.NamedThreadFactory;
-import com.google.common.base.Joiner;
-import com.google.common.collect.MapMaker;
-
-/**
- * Database-backed remote config loader
- *
- * @author rewerma 2019-01-25 05:20:16 PM
- * @version 1.0.0
- */
-public class DbRemoteConfigLoader implements RemoteConfigLoader {
-
-    private static final Logger      logger                 = LoggerFactory.getLogger(DbRemoteConfigLoader.class);
-
-    private DruidDataSource          dataSource;
-
-    private static volatile long     currentConfigTimestamp = 0;
-    private Map<String, ConfigItem>  remoteAdapterConfigs   = new MapMaker().makeMap();
-
-    private ScheduledExecutorService executor               = Executors.newScheduledThreadPool(2,
-        new NamedThreadFactory("remote-adapter-config-scan"));
-
-    private RemoteAdapterMonitor     remoteAdapterMonitor   = new RemoteAdapterMonitorImpl();
-
-    public DbRemoteConfigLoader(String driverName, String jdbcUrl, String jdbcUsername, String jdbcPassword){
-        dataSource = new DruidDataSource();
-        if (StringUtils.isEmpty(driverName)) {
-            driverName = "com.mysql.jdbc.Driver";
-        }
-        dataSource.setDriverClassName(driverName);
-        dataSource.setUrl(jdbcUrl);
-        dataSource.setUsername(jdbcUsername);
-        dataSource.setPassword(jdbcPassword);
-        dataSource.setInitialSize(1);
-        dataSource.setMinIdle(1);
-        dataSource.setMaxActive(1);
-        dataSource.setMaxWait(60000);
-        dataSource.setTimeBetweenEvictionRunsMillis(60000);
-        dataSource.setMinEvictableIdleTimeMillis(300000);
-        try {
-            dataSource.init();
-        } catch (SQLException e) {
-            throw new RuntimeException(e.getMessage(), e);
-        }
-    }
-
-    /**
-     * Load the remote application.yml config
-     */
-    @Override
-    public void loadRemoteConfig() {
-        try {
-            // Load the remote application.yml config
-            ConfigItem configItem = getRemoteAdapterConfig();
-            if (configItem != null) {
-                if (configItem.getModifiedTime() != currentConfigTimestamp) {
-                    currentConfigTimestamp = configItem.getModifiedTime();
-                    overrideLocalCanalConfig(configItem.getContent());
-                    logger.info("## Loaded remote adapter config: application.yml");
-                }
-            }
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    /**
-     * Fetch the remote application.yml config
-     *
-     * @return config item
-     */
-    private ConfigItem getRemoteAdapterConfig() {
-        String sql = "select name, content, modified_time from canal_config where id=2";
-        try (Connection conn = dataSource.getConnection();
-                Statement stmt = conn.createStatement();
-                ResultSet rs = stmt.executeQuery(sql)) {
-            if (rs.next()) {
-                ConfigItem configItem = new ConfigItem();
-                configItem.setId(2L);
-                configItem.setName(rs.getString("name"));
-                configItem.setContent(rs.getString("content"));
-                configItem.setModifiedTime(rs.getTimestamp("modified_time").getTime());
-                return configItem;
-            }
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-        return null;
-    }
-
-    /**
-     * Overwrite the local application.yml file
-     *
-     * @param content file content
-     */
-    private void overrideLocalCanalConfig(String content) {
-        try (FileWriter writer = new FileWriter(CommonUtils.getConfPath() + "application.yml")) {
-            writer.write(content);
-            writer.flush();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    /**
-     * Load adapter configs
-     */
-    @Override
-    public void loadRemoteAdapterConfigs() {
-        try {
-            // Load remote adapter configs
-            loadModifiedAdapterConfigs();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    /**
-     * Load adapter configs that have changed
-     */
-    @SuppressWarnings("unchecked")
-    private void loadModifiedAdapterConfigs() {
-        Map<String, ConfigItem>[] res = new Map[2];
-        Map<String, ConfigItem> remoteConfigStatus = new HashMap<>();
-        String sql = "select id, category, name, modified_time from canal_adapter_config";
-        try (Connection conn = dataSource.getConnection();
-                Statement stmt = conn.createStatement();
-                ResultSet rs = stmt.executeQuery(sql)) {
-            while (rs.next()) {
-                ConfigItem configItem = new ConfigItem();
-                configItem.setId(rs.getLong("id"));
-                configItem.setCategory(rs.getString("category"));
-                configItem.setName(rs.getString("name"));
-                configItem.setModifiedTime(rs.getTimestamp("modified_time").getTime());
-                remoteConfigStatus.put(configItem.getCategory() + "/" + configItem.getName(), configItem);
-            }
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-
-        if (!remoteConfigStatus.isEmpty()) {
-            List<Long> changedIds = new ArrayList<>();
-
-            for (ConfigItem remoteConfigStat : remoteConfigStatus.values()) {
-                ConfigItem currentConfig = remoteAdapterConfigs
-                    .get(remoteConfigStat.getCategory() + "/" + remoteConfigStat.getName());
-                if (currentConfig == null) {
-                    // added
-                    changedIds.add(remoteConfigStat.getId());
-                } else {
-                    // modified
-                    if (currentConfig.getModifiedTime() != remoteConfigStat.getModifiedTime()) {
-                        changedIds.add(remoteConfigStat.getId());
-                    }
-                }
-            }
-            if (!changedIds.isEmpty()) {
-                String contentsSql = "select id, category, name, content, modified_time from canal_adapter_config  where id in ("
-                                     + Joiner.on(",").join(changedIds) + ")";
-                try (Connection conn = dataSource.getConnection();
-                        Statement stmt = conn.createStatement();
-                        ResultSet rs = stmt.executeQuery(contentsSql)) {
-                    while (rs.next()) {
-                        ConfigItem configItemNew = new ConfigItem();
-                        configItemNew.setId(rs.getLong("id"));
-                        configItemNew.setCategory(rs.getString("category"));
-                        configItemNew.setName(rs.getString("name"));
-                        configItemNew.setContent(rs.getString("content"));
-                        configItemNew.setModifiedTime(rs.getTimestamp("modified_time").getTime());
-
-                        remoteAdapterConfigs.put(configItemNew.getCategory() + "/" + configItemNew.getName(),
-                            configItemNew);
-                        remoteAdapterMonitor.onModify(configItemNew);
-                    }
-
-                } catch (Exception e) {
-                    logger.error(e.getMessage(), e);
-                }
-            }
-        }
-
-        for (ConfigItem configItem : remoteAdapterConfigs.values()) {
-            if (!remoteConfigStatus.containsKey(configItem.getCategory() + "/" + configItem.getName())) {
-                // deleted
-                remoteAdapterConfigs.remove(configItem.getCategory() + "/" + configItem.getName());
-                remoteAdapterMonitor.onDelete(configItem.getCategory() + "/" + configItem.getName());
-            }
-        }
-    }
-
-    /**
-     * Start monitoring database changes
-     */
-    @Override
-    public void startMonitor() {
-        // Watch for application.yml changes
-        executor.scheduleWithFixedDelay(() -> {
-            try {
-                loadRemoteConfig();
-            } catch (Throwable e) {
-                logger.error("scan remote application.yml failed", e);
-            }
-        }, 10, 3, TimeUnit.SECONDS);
-
-        // Watch for adapter config changes
-        executor.scheduleWithFixedDelay(() -> {
-            try {
-                loadRemoteAdapterConfigs();
-            } catch (Throwable e) {
-                logger.error("scan remote adapter configs failed", e);
-            }
-        }, 10, 3, TimeUnit.SECONDS);
-    }
-
-    /**
-     * Shut down the loader
-     */
-    @Override
-    public void destroy() {
-        executor.shutdownNow();
-        try {
-            dataSource.close();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-}
+package com.alibaba.otter.canal.adapter.launcher.monitor.remote;
+
+import java.io.FileWriter;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.otter.canal.common.utils.CommonUtils;
+import com.alibaba.otter.canal.common.utils.NamedThreadFactory;
+import com.google.common.base.Joiner;
+import com.google.common.collect.MapMaker;
+
+/**
+ * Database-backed remote config loader
+ *
+ * @author rewerma 2019-01-25 05:20:16 PM
+ * @version 1.0.0
+ */
+public class DbRemoteConfigLoader implements RemoteConfigLoader {
+
+    private static final Logger      logger                 = LoggerFactory.getLogger(DbRemoteConfigLoader.class);
+
+    private DruidDataSource          dataSource;
+
+    private static volatile long     currentConfigTimestamp = 0;
+    private Map<String, ConfigItem>  remoteAdapterConfigs   = new MapMaker().makeMap();
+
+    private ScheduledExecutorService executor               = Executors.newScheduledThreadPool(2,
+        new NamedThreadFactory("remote-adapter-config-scan"));
+
+    private RemoteAdapterMonitor     remoteAdapterMonitor   = new RemoteAdapterMonitorImpl();
+
+    public DbRemoteConfigLoader(String driverName, String jdbcUrl, String jdbcUsername, String jdbcPassword){
+        dataSource = new DruidDataSource();
+        if (StringUtils.isEmpty(driverName)) {
+            driverName = "com.mysql.jdbc.Driver";
+        }
+        dataSource.setDriverClassName(driverName);
+        dataSource.setUrl(jdbcUrl);
+        dataSource.setUsername(jdbcUsername);
+        dataSource.setPassword(jdbcPassword);
+        dataSource.setInitialSize(1);
+        dataSource.setMinIdle(1);
+        dataSource.setMaxActive(1);
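+        // Pool is sized to a single connection: config scans are lightweight and serialized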
+        dataSource.setMaxWait(60000);
+        dataSource.setTimeBetweenEvictionRunsMillis(60000);
+        dataSource.setMinEvictableIdleTimeMillis(300000);
+        try {
+            dataSource.init();
+        } catch (SQLException e) {
+            throw new RuntimeException(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * Load the remote application.yml config
+     */
+    @Override
+    public void loadRemoteConfig() {
+        try {
+            // Load the remote application.yml config
+            ConfigItem configItem = getRemoteAdapterConfig();
+            if (configItem != null) {
+                if (configItem.getModifiedTime() != currentConfigTimestamp) {
+                    currentConfigTimestamp = configItem.getModifiedTime();
+                    overrideLocalCanalConfig(configItem.getContent());
+                    logger.info("## Loaded remote adapter config: application.yml");
+                }
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * Fetch the remote application.yml config
+     *
+     * @return config item
+     */
+    private ConfigItem getRemoteAdapterConfig() {
+        String sql = "select name, content, modified_time from canal_config where id=2";
+        try (Connection conn = dataSource.getConnection();
+                Statement stmt = conn.createStatement();
+                ResultSet rs = stmt.executeQuery(sql)) {
+            if (rs.next()) {
+                ConfigItem configItem = new ConfigItem();
+                configItem.setId(2L);
+                configItem.setName(rs.getString("name"));
+                configItem.setContent(rs.getString("content"));
+                configItem.setModifiedTime(rs.getTimestamp("modified_time").getTime());
+                return configItem;
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+        return null;
+    }
+
+    /**
+     * Overwrite the local application.yml file
+     *
+     * @param content file content
+     */
+    private void overrideLocalCanalConfig(String content) {
+        try (FileWriter writer = new FileWriter(CommonUtils.getConfPath() + "application.yml")) {
+            writer.write(content);
+            writer.flush();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * Load adapter configs
+     */
+    @Override
+    public void loadRemoteAdapterConfigs() {
+        try {
+            // Load remote adapter configs
+            loadModifiedAdapterConfigs();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * Load adapter configs that have changed
+     */
+    private void loadModifiedAdapterConfigs() {
+        Map<String, ConfigItem> remoteConfigStatus = new HashMap<>();
+        String sql = "select id, category, name, modified_time from canal_adapter_config";
+        try (Connection conn = dataSource.getConnection();
+                Statement stmt = conn.createStatement();
+                ResultSet rs = stmt.executeQuery(sql)) {
+            while (rs.next()) {
+                ConfigItem configItem = new ConfigItem();
+                configItem.setId(rs.getLong("id"));
+                configItem.setCategory(rs.getString("category"));
+                configItem.setName(rs.getString("name"));
+                configItem.setModifiedTime(rs.getTimestamp("modified_time").getTime());
+                remoteConfigStatus.put(configItem.getCategory() + "/" + configItem.getName(), configItem);
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+
+        if (!remoteConfigStatus.isEmpty()) {
+            List<Long> changedIds = new ArrayList<>();
+
+            for (ConfigItem remoteConfigStat : remoteConfigStatus.values()) {
+                ConfigItem currentConfig = remoteAdapterConfigs
+                    .get(remoteConfigStat.getCategory() + "/" + remoteConfigStat.getName());
+                if (currentConfig == null) {
+                    // Newly added
+                    changedIds.add(remoteConfigStat.getId());
+                } else {
+                    // Modified
+                    if (currentConfig.getModifiedTime() != remoteConfigStat.getModifiedTime()) {
+                        changedIds.add(remoteConfigStat.getId());
+                    }
+                }
+            }
+            if (!changedIds.isEmpty()) {
+                String contentsSql = "select id, category, name, content, modified_time from canal_adapter_config where id in ("
+                                     + Joiner.on(",").join(changedIds) + ")";
+                try (Connection conn = dataSource.getConnection();
+                        Statement stmt = conn.createStatement();
+                        ResultSet rs = stmt.executeQuery(contentsSql)) {
+                    while (rs.next()) {
+                        ConfigItem configItemNew = new ConfigItem();
+                        configItemNew.setId(rs.getLong("id"));
+                        configItemNew.setCategory(rs.getString("category"));
+                        configItemNew.setName(rs.getString("name"));
+                        configItemNew.setContent(rs.getString("content"));
+                        configItemNew.setModifiedTime(rs.getTimestamp("modified_time").getTime());
+
+                        remoteAdapterConfigs.put(configItemNew.getCategory() + "/" + configItemNew.getName(),
+                            configItemNew);
+                        remoteAdapterMonitor.onModify(configItemNew);
+                    }
+
+                } catch (Exception e) {
+                    logger.error(e.getMessage(), e);
+                }
+            }
+        }
+
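+        // NOTE: entries are removed from remoteAdapterConfigs while iterating its values();
+        // this assumes a ConcurrentHashMap, since a plain HashMap would throw
+        // ConcurrentModificationException here.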
+        for (ConfigItem configItem : remoteAdapterConfigs.values()) {
+            if (!remoteConfigStatus.containsKey(configItem.getCategory() + "/" + configItem.getName())) {
+                // Deleted
+                remoteAdapterConfigs.remove(configItem.getCategory() + "/" + configItem.getName());
+                remoteAdapterMonitor.onDelete(configItem.getCategory() + "/" + configItem.getName());
+            }
+        }
+    }
+
+    /**
+     * Start monitoring database changes
+     */
+    @Override
+    public void startMonitor() {
+        // Watch for application.yml changes
+        executor.scheduleWithFixedDelay(() -> {
+            try {
+                loadRemoteConfig();
+            } catch (Throwable e) {
+                logger.error("scan remote application.yml failed", e);
+            }
+        }, 10, 3, TimeUnit.SECONDS);
+
+        // Watch for adapter config changes
+        executor.scheduleWithFixedDelay(() -> {
+            try {
+                loadRemoteAdapterConfigs();
+            } catch (Throwable e) {
+                logger.error("scan remote adapter configs failed", e);
+            }
+        }, 10, 3, TimeUnit.SECONDS);
+    }
+
+    /**
+     * Destroy
+     */
+    @Override
+    public void destroy() {
+        executor.shutdownNow();
+        try {
+            dataSource.close();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+}
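
For orientation, a minimal lifecycle sketch of how this remote loader is driven. The class name
is taken from the file path of this diff; the constructor arguments are an assumption, since the
hunk only shows the dataSource and executor fields being used:

    // Hypothetical bootstrap: pull both config kinds once, then poll the database for changes.
    DbRemoteConfigLoader loader = new DbRemoteConfigLoader(jdbcUrl, jdbcUsername, jdbcPassword);
    loader.loadRemoteConfig();          // fetch remote application.yml and override the local copy
    loader.loadRemoteAdapterConfigs();  // fill the local adapter config cache
    loader.startMonitor();              // re-scan both every 3 seconds after a 10 second delay
    // ... on shutdown:
    loader.destroy();                   // stop the scheduler and close the data source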

+ 51 - 53
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/ConfigLoader.java

@@ -1,53 +1,51 @@
-package com.alibaba.otter.canal.client.adapter.rdb.config;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Properties;
-
-import com.alibaba.fastjson.JSONObject;
-import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
-
-import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
-
-/**
- * RDB table mapping config loader
- *
- * @author rewerma 2018-11-07 02:41:34 PM
- * @version 1.0.0
- */
-public class ConfigLoader {
-
-    private static Logger logger = LoggerFactory.getLogger(ConfigLoader.class);
-
-    /**
-     * Load the RDB table mapping configs
-     *
-     * @return map of config/file name to config object
-     */
-    public static Map<String, MappingConfig> load(Properties envProperties) {
-        logger.info("## Start loading rdb mapping config ... ");
-
-        Map<String, MappingConfig> result = new LinkedHashMap<>();
-
-        Map<String, String> configContentMap = MappingConfigsLoader.loadConfigs("rdb");
-        configContentMap.forEach((fileName, content) -> {
-            MappingConfig config = YmlConfigBinder
-                .bindYmlToObj(null, content, MappingConfig.class, null, envProperties);
-            if (config == null) {
-                return;
-            }
-            try {
-                config.validate();
-            } catch (Exception e) {
-                throw new RuntimeException("ERROR Config: " + fileName + " " + e.getMessage(), e);
-            }
-            result.put(fileName, config);
-        });
-
-        logger.info("## Rdb mapping config loaded");
-        return result;
-    }
-}
+package com.alibaba.otter.canal.client.adapter.rdb.config;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder;
+import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
+
+/**
+ * RDB table mapping config loader
+ *
+ * @author rewerma 2018-11-07 02:41:34 PM
+ * @version 1.0.0
+ */
+public class ConfigLoader {
+
+    private static Logger logger = LoggerFactory.getLogger(ConfigLoader.class);
+
+    /**
+     * Load the RDB table mapping configs
+     *
+     * @return map of config/file name to config object
+     */
+    public static Map<String, MappingConfig> load(Properties envProperties) {
+        logger.info("## Start loading rdb mapping config ... ");
+
+        Map<String, MappingConfig> result = new LinkedHashMap<>();
+
+        Map<String, String> configContentMap = MappingConfigsLoader.loadConfigs("rdb");
+        configContentMap.forEach((fileName, content) -> {
+            MappingConfig config = YmlConfigBinder
+                .bindYmlToObj(null, content, MappingConfig.class, null, envProperties);
+            if (config == null) {
+                return;
+            }
+            try {
+                config.validate();
+            } catch (Exception e) {
+                throw new RuntimeException("ERROR Config: " + fileName + " " + e.getMessage(), e);
+            }
+            result.put(fileName, config);
+        });
+
+        logger.info("## Rdb mapping config loaded");
+        return result;
+    }
+}
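
A short usage sketch of this loader, assuming an SLF4J logger in scope; ConfigLoader.load and
MappingConfig#getDestination() both appear elsewhere in this commit:

    // Bind every conf/rdb/*.yml into a validated MappingConfig at startup.
    Properties env = new Properties();
    Map<String, MappingConfig> configs = ConfigLoader.load(env);
    configs.forEach((fileName, cfg) ->
        logger.info("Loaded rdb mapping {} for destination {}", fileName, cfg.getDestination()));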

+ 169 - 170
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/monitor/RdbConfigMonitor.java

@@ -1,170 +1,169 @@
-package com.alibaba.otter.canal.client.adapter.rdb.monitor;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.Map;
-
-import com.alibaba.otter.canal.client.adapter.rdb.config.MirrorDbConfig;
-import org.apache.commons.io.filefilter.FileFilterUtils;
-import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
-import org.apache.commons.io.monitor.FileAlterationMonitor;
-import org.apache.commons.io.monitor.FileAlterationObserver;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
-
-import com.alibaba.otter.canal.client.adapter.rdb.RdbAdapter;
-import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
-import com.alibaba.otter.canal.client.adapter.rdb.config.MirrorDbConfig;
-import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
-import com.alibaba.otter.canal.client.adapter.support.Util;
-
-public class RdbConfigMonitor {
-
-    private static final Logger   logger      = LoggerFactory.getLogger(RdbConfigMonitor.class);
-
-    private static final String   adapterName = "rdb";
-
-    private String                key;
-
-    private RdbAdapter            rdbAdapter;
-
-    private FileAlterationMonitor fileMonitor;
-
-    public void init(String key, RdbAdapter rdbAdapter) {
-        this.key = key;
-        this.rdbAdapter = rdbAdapter;
-        File confDir = Util.getConfDirPath(adapterName);
-        try {
-            FileAlterationObserver observer = new FileAlterationObserver(confDir,
-                FileFilterUtils.and(FileFilterUtils.fileFileFilter(), FileFilterUtils.suffixFileFilter("yml")));
-            FileListener listener = new FileListener();
-            observer.addListener(listener);
-            fileMonitor = new FileAlterationMonitor(3000, observer);
-            fileMonitor.start();
-
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    public void destroy() {
-        try {
-            fileMonitor.stop();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    private class FileListener extends FileAlterationListenerAdaptor {
-
-        @Override
-        public void onFileCreate(File file) {
-            super.onFileCreate(file);
-            try {
-                // Load the newly created config file
-                String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName());
-                MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
-                config.validate();
-                if ((key == null && config.getOuterAdapterKey() == null)
-                    || (key != null && key.equals(config.getOuterAdapterKey()))) {
-                    addConfigToCache(file, config);
-
-                    logger.info("Add a new rdb mapping config: {} to canal adapter", file.getName());
-                }
-            } catch (Exception e) {
-                logger.error(e.getMessage(), e);
-            }
-        }
-
-        @Override
-        public void onFileChange(File file) {
-            super.onFileChange(file);
-
-            try {
-                if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
-                    // Load the config file
-                    String configContent = MappingConfigsLoader
-                        .loadConfig(adapterName + File.separator + file.getName());
-                    if (configContent == null) {
-                        onFileDelete(file);
-                        return;
-                    }
-                    MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
-                    config.validate();
-                    if ((key == null && config.getOuterAdapterKey() == null)
-                        || (key != null && key.equals(config.getOuterAdapterKey()))) {
-                        if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
-                            deleteConfigFromCache(file);
-                        }
-                        addConfigToCache(file, config);
-                    } else {
-                        // The outerAdapterKey must not be modified
-                        throw new RuntimeException("Outer adapter key is not allowed to be modified");
-                    }
-                    logger.info("Change a rdb mapping config: {} of canal adapter", file.getName());
-                }
-            } catch (Exception e) {
-                logger.error(e.getMessage(), e);
-            }
-        }
-
-        @Override
-        public void onFileDelete(File file) {
-            super.onFileDelete(file);
-
-            try {
-                if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
-                    deleteConfigFromCache(file);
-
-                    logger.info("Delete a rdb mapping config: {} of canal adapter", file.getName());
-                }
-            } catch (Exception e) {
-                logger.error(e.getMessage(), e);
-            }
-        }
-
-        private void addConfigToCache(File file, MappingConfig mappingConfig) {
-            if (mappingConfig == null || mappingConfig.getDbMapping() == null) {
-                return;
-            }
-            rdbAdapter.getRdbMapping().put(file.getName(), mappingConfig);
-            if (!mappingConfig.getDbMapping().getMirrorDb()) {
-                Map<String, MappingConfig> configMap = rdbAdapter.getMappingConfigCache()
-                    .computeIfAbsent(StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
-                                     + mappingConfig.getDbMapping().getDatabase() + "."
-                                     + mappingConfig.getDbMapping().getTable(),
-                        k1 -> new HashMap<>());
-                configMap.put(file.getName(), mappingConfig);
-            } else {
-                Map<String, MirrorDbConfig> mirrorDbConfigCache = rdbAdapter.getMirrorDbConfigCache();
-                mirrorDbConfigCache.put(StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
-                                        + mappingConfig.getDbMapping().getDatabase(),
-                    MirrorDbConfig.create(file.getName(), mappingConfig));
-            }
-        }
-
-        private void deleteConfigFromCache(File file) {
-            MappingConfig mappingConfig = rdbAdapter.getRdbMapping().remove(file.getName());
-
-            if (mappingConfig == null || mappingConfig.getDbMapping() == null) {
-                return;
-            }
-            if (!mappingConfig.getDbMapping().getMirrorDb()) {
-                for (Map<String, MappingConfig> configMap : rdbAdapter.getMappingConfigCache().values()) {
-                    if (configMap != null) {
-                        configMap.remove(file.getName());
-                    }
-                }
-            } else {
-                rdbAdapter.getMirrorDbConfigCache().forEach((key, mirrorDbConfig) -> {
-                    if (mirrorDbConfig.getFileName().equals(file.getName())) {
-                        rdbAdapter.getMirrorDbConfigCache().remove(key);
-                    }
-                });
-            }
-
-        }
-    }
-}
+package com.alibaba.otter.canal.client.adapter.rdb.monitor;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.io.filefilter.FileFilterUtils;
+import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
+import org.apache.commons.io.monitor.FileAlterationMonitor;
+import org.apache.commons.io.monitor.FileAlterationObserver;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import com.alibaba.otter.canal.client.adapter.rdb.RdbAdapter;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MirrorDbConfig;
+import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+
+public class RdbConfigMonitor {
+
+    private static final Logger   logger      = LoggerFactory.getLogger(RdbConfigMonitor.class);
+
+    private static final String   adapterName = "rdb";
+
+    private String                key;
+
+    private RdbAdapter            rdbAdapter;
+
+    private FileAlterationMonitor fileMonitor;
+
+    public void init(String key, RdbAdapter rdbAdapter) {
+        this.key = key;
+        this.rdbAdapter = rdbAdapter;
+        File confDir = Util.getConfDirPath(adapterName);
+        try {
+            FileAlterationObserver observer = new FileAlterationObserver(confDir,
+                FileFilterUtils.and(FileFilterUtils.fileFileFilter(), FileFilterUtils.suffixFileFilter("yml")));
+            FileListener listener = new FileListener();
+            observer.addListener(listener);
+            fileMonitor = new FileAlterationMonitor(3000, observer);
+            fileMonitor.start();
+
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    public void destroy() {
+        try {
+            fileMonitor.stop();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    private class FileListener extends FileAlterationListenerAdaptor {
+
+        @Override
+        public void onFileCreate(File file) {
+            super.onFileCreate(file);
+            try {
+                // Load the newly created config file
+                String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName());
+                MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
+                config.validate();
+                if ((key == null && config.getOuterAdapterKey() == null)
+                    || (key != null && key.equals(config.getOuterAdapterKey()))) {
+                    addConfigToCache(file, config);
+
+                    logger.info("Add a new rdb mapping config: {} to canal adapter", file.getName());
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        @Override
+        public void onFileChange(File file) {
+            super.onFileChange(file);
+
+            try {
+                if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
+                    // Load the config file
+                    String configContent = MappingConfigsLoader
+                        .loadConfig(adapterName + File.separator + file.getName());
+                    if (configContent == null) {
+                        onFileDelete(file);
+                        return;
+                    }
+                    MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
+                    config.validate();
+                    if ((key == null && config.getOuterAdapterKey() == null)
+                        || (key != null && key.equals(config.getOuterAdapterKey()))) {
+                        if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
+                            deleteConfigFromCache(file);
+                        }
+                        addConfigToCache(file, config);
+                    } else {
+                        // The outerAdapterKey must not be modified
+                        throw new RuntimeException("Outer adapter key is not allowed to be modified");
+                    }
+                    logger.info("Change a rdb mapping config: {} of canal adapter", file.getName());
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        @Override
+        public void onFileDelete(File file) {
+            super.onFileDelete(file);
+
+            try {
+                if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
+                    deleteConfigFromCache(file);
+
+                    logger.info("Delete a rdb mapping config: {} of canal adapter", file.getName());
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        private void addConfigToCache(File file, MappingConfig mappingConfig) {
+            if (mappingConfig == null || mappingConfig.getDbMapping() == null) {
+                return;
+            }
+            rdbAdapter.getRdbMapping().put(file.getName(), mappingConfig);
+            if (!mappingConfig.getDbMapping().getMirrorDb()) {
+                Map<String, MappingConfig> configMap = rdbAdapter.getMappingConfigCache()
+                    .computeIfAbsent(StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
+                                     + mappingConfig.getDbMapping().getDatabase() + "."
+                                     + mappingConfig.getDbMapping().getTable(),
+                        k1 -> new HashMap<>());
+                configMap.put(file.getName(), mappingConfig);
+            } else {
+                Map<String, MirrorDbConfig> mirrorDbConfigCache = rdbAdapter.getMirrorDbConfigCache();
+                mirrorDbConfigCache.put(StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
+                                        + mappingConfig.getDbMapping().getDatabase(),
+                    MirrorDbConfig.create(file.getName(), mappingConfig));
+            }
+        }
+
+        private void deleteConfigFromCache(File file) {
+            MappingConfig mappingConfig = rdbAdapter.getRdbMapping().remove(file.getName());
+
+            if (mappingConfig == null || mappingConfig.getDbMapping() == null) {
+                return;
+            }
+            if (!mappingConfig.getDbMapping().getMirrorDb()) {
+                for (Map<String, MappingConfig> configMap : rdbAdapter.getMappingConfigCache().values()) {
+                    if (configMap != null) {
+                        configMap.remove(file.getName());
+                    }
+                }
+            } else {
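+                // NOTE: removing inside forEach assumes mirrorDbConfigCache is a ConcurrentHashMap;
+                // iterating a plain HashMap this way would throw ConcurrentModificationException.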
+                rdbAdapter.getMirrorDbConfigCache().forEach((key, mirrorDbConfig) -> {
+                    if (mirrorDbConfig.getFileName().equals(file.getName())) {
+                        rdbAdapter.getMirrorDbConfigCache().remove(key);
+                    }
+                });
+            }
+
+        }
+    }
+}

+ 506 - 499
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java

@@ -1,499 +1,506 @@
-package com.alibaba.otter.canal.client.adapter.rdb.service;
-
-import java.sql.Connection;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.util.*;
-import java.util.concurrent.*;
-import java.util.function.Function;
-
-import javax.sql.DataSource;
-
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.fastjson.JSON;
-import com.alibaba.fastjson.serializer.SerializerFeature;
-import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
-import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig.DbMapping;
-import com.alibaba.otter.canal.client.adapter.rdb.support.BatchExecutor;
-import com.alibaba.otter.canal.client.adapter.rdb.support.SingleDml;
-import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil;
-import com.alibaba.otter.canal.client.adapter.support.Dml;
-import com.alibaba.otter.canal.client.adapter.support.Util;
-
-/**
- * RDB synchronization service
- *
- * @author rewerma 2018-11-7 06:45:49 PM
- * @version 1.0.0
- */
-public class RdbSyncService {
-
-    private static final Logger               logger  = LoggerFactory.getLogger(RdbSyncService.class);
-
-    // Column type cache keyed by source table: instance.schema.table -> <columnName, jdbcType>
-    private Map<String, Map<String, Integer>> columnsTypeCache;
-
-    private int                               threads = 3;
-    private boolean                           skipDupException;
-
-    private List<SyncItem>[]                  dmlsPartition;
-    private BatchExecutor[]                   batchExecutors;
-    private ExecutorService[]                 executorThreads;
-
-    public List<SyncItem>[] getDmlsPartition() {
-        return dmlsPartition;
-    }
-
-    public Map<String, Map<String, Integer>> getColumnsTypeCache() {
-        return columnsTypeCache;
-    }
-
-    @SuppressWarnings("unchecked")
-    public RdbSyncService(DataSource dataSource, Integer threads, boolean skipDupException){
-        this(dataSource, threads, new ConcurrentHashMap<>(), skipDupException);
-    }
-
-    @SuppressWarnings("unchecked")
-    public RdbSyncService(DataSource dataSource, Integer threads, Map<String, Map<String, Integer>> columnsTypeCache,
-                          boolean skipDupException){
-        this.columnsTypeCache = columnsTypeCache;
-        this.skipDupException = skipDupException;
-        try {
-            if (threads != null) {
-                this.threads = threads;
-            }
-            this.dmlsPartition = new List[this.threads];
-            this.batchExecutors = new BatchExecutor[this.threads];
-            this.executorThreads = new ExecutorService[this.threads];
-            for (int i = 0; i < this.threads; i++) {
-                dmlsPartition[i] = new ArrayList<>();
-                batchExecutors[i] = new BatchExecutor(dataSource);
-                executorThreads[i] = Executors.newSingleThreadExecutor();
-            }
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * Batch sync with a callback
-     *
-     * @param dmls batch of DMLs
-     * @param function callback applied to each DML
-     */
-    public void sync(List<Dml> dmls, Function<Dml, Boolean> function) {
-        try {
-            boolean toExecute = false;
-            for (Dml dml : dmls) {
-                if (!toExecute) {
-                    toExecute = function.apply(dml);
-                } else {
-                    function.apply(dml);
-                }
-            }
-            if (toExecute) {
-                List<Future> futures = new ArrayList<>();
-                for (int i = 0; i < threads; i++) {
-                    int j = i;
-                    futures.add(executorThreads[i].submit(() -> {
-                        try {
-                            dmlsPartition[j]
-                                .forEach(syncItem -> sync(batchExecutors[j], syncItem.config, syncItem.singleDml));
-                            dmlsPartition[j].clear();
-                            batchExecutors[j].commit();
-                            return true;
-                        } catch (Throwable e) {
-                            batchExecutors[j].rollback();
-                            throw new RuntimeException(e);
-                        }
-                    }));
-                }
-
-                futures.forEach(future -> {
-                    try {
-                        future.get();
-                    } catch (ExecutionException | InterruptedException e) {
-                        throw new RuntimeException(e);
-                    }
-                });
-            }
-        } finally {
-            for (BatchExecutor batchExecutor : batchExecutors) {
-                if (batchExecutor != null) {
-                    batchExecutor.close();
-                }
-            }
-        }
-    }
-
-    /**
-     * Batch sync
-     *
-     * @param mappingConfig config map
-     * @param dmls batch of DMLs
-     */
-    public void sync(Map<String, Map<String, MappingConfig>> mappingConfig, List<Dml> dmls, Properties envProperties) {
-        sync(dmls, dml -> {
-            if (dml.getIsDdl() != null && dml.getIsDdl() && StringUtils.isNotEmpty(dml.getSql())) {
-                // DDL
-                columnsTypeCache.remove(dml.getDestination() + "." + dml.getDatabase() + "." + dml.getTable());
-                return false;
-            } else {
-                // DML
-                String destination = StringUtils.trimToEmpty(dml.getDestination());
-                String groupId = StringUtils.trimToEmpty(dml.getGroupId());
-                String database = dml.getDatabase();
-                String table = dml.getTable();
-                Map<String, MappingConfig> configMap;
-                if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) {
-                    configMap = mappingConfig.get(destination + "-" + groupId + "_" + database + "-" + table);
-                } else {
-                    configMap = mappingConfig.get(destination + "_" + database + "-" + table);
-                }
-
-                if (configMap == null) {
-                    return false;
-                }
-
-                if (configMap.values().isEmpty()) {
-                    return false;
-                }
-
-                for (MappingConfig config : configMap.values()) {
-                    if (config.getConcurrent()) {
-                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
-                        singleDmls.forEach(singleDml -> {
-                            int hash = pkHash(config.getDbMapping(), singleDml.getData());
-                            SyncItem syncItem = new SyncItem(config, singleDml);
-                            dmlsPartition[hash].add(syncItem);
-                        });
-                    } else {
-                        int hash = 0;
-                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
-                        singleDmls.forEach(singleDml -> {
-                            SyncItem syncItem = new SyncItem(config, singleDml);
-                            dmlsPartition[hash].add(syncItem);
-                        });
-                    }
-                }
-                return true;
-            }
-        });
-    }
-
-    /**
-     * Sync a single DML
-     *
-     * @param batchExecutor batch transaction executor
-     * @param config corresponding mapping config
-     * @param dml DML
-     */
-    public void sync(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) {
-        if (config != null) {
-            try {
-                String type = dml.getType();
-                if (type != null && type.equalsIgnoreCase("INSERT")) {
-                    insert(batchExecutor, config, dml);
-                } else if (type != null && type.equalsIgnoreCase("UPDATE")) {
-                    update(batchExecutor, config, dml);
-                } else if (type != null && type.equalsIgnoreCase("DELETE")) {
-                    delete(batchExecutor, config, dml);
-                } else if (type != null && type.equalsIgnoreCase("TRUNCATE")) {
-                    truncate(batchExecutor, config);
-                }
-                if (logger.isDebugEnabled()) {
-                    logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue));
-                }
-            } catch (SQLException e) {
-                throw new RuntimeException(e);
-            }
-        }
-    }
-
-    /**
-     * Insert operation
-     *
-     * @param config mapping config
-     * @param dml DML data
-     */
-    private void insert(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
-        Map<String, Object> data = dml.getData();
-        if (data == null || data.isEmpty()) {
-            return;
-        }
-
-        DbMapping dbMapping = config.getDbMapping();
-
-        Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
-
-        StringBuilder insertSql = new StringBuilder();
-        insertSql.append("INSERT INTO ").append(SyncUtil.getDbTableName(dbMapping)).append(" (");
-
-        columnsMap.forEach((targetColumnName, srcColumnName) -> insertSql.append(targetColumnName).append(","));
-        int len = insertSql.length();
-        insertSql.delete(len - 1, len).append(") VALUES (");
-        int mapLen = columnsMap.size();
-        for (int i = 0; i < mapLen; i++) {
-            insertSql.append("?,");
-        }
-        len = insertSql.length();
-        insertSql.delete(len - 1, len).append(")");
-
-        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
-
-        List<Map<String, ?>> values = new ArrayList<>();
-        for (Map.Entry<String, String> entry : columnsMap.entrySet()) {
-            String targetColumnName = entry.getKey();
-            String srcColumnName = entry.getValue();
-            if (srcColumnName == null) {
-                srcColumnName = Util.cleanColumn(targetColumnName);
-            }
-
-            Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
-            if (type == null) {
-                throw new RuntimeException("Target column: " + targetColumnName + " not matched");
-            }
-            Object value = data.get(srcColumnName);
-            BatchExecutor.setValue(values, type, value);
-        }
-
-        try {
-            batchExecutor.execute(insertSql.toString(), values);
-        } catch (SQLException e) {
-            if (skipDupException
-                && (e.getMessage().contains("Duplicate entry") || e.getMessage().startsWith("ORA-00001: 违反唯一约束条件"))) {
-                // ignore
-                // TODO: add duplicate-key error codes for more relational databases
-            } else {
-                throw e;
-            }
-        }
-        if (logger.isTraceEnabled()) {
-            logger.trace("Insert into target table, sql: {}", insertSql);
-        }
-
-    }
-
-    /**
-     * Update operation
-     *
-     * @param config mapping config
-     * @param dml DML data
-     */
-    private void update(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
-        Map<String, Object> data = dml.getData();
-        if (data == null || data.isEmpty()) {
-            return;
-        }
-
-        Map<String, Object> old = dml.getOld();
-        if (old == null || old.isEmpty()) {
-            return;
-        }
-
-        DbMapping dbMapping = config.getDbMapping();
-
-        Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
-
-        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
-
-        StringBuilder updateSql = new StringBuilder();
-        updateSql.append("UPDATE ").append(SyncUtil.getDbTableName(dbMapping)).append(" SET ");
-        List<Map<String, ?>> values = new ArrayList<>();
-        boolean hasMatched = false;
-        for (String srcColumnName : old.keySet()) {
-            List<String> targetColumnNames = new ArrayList<>();
-            columnsMap.forEach((targetColumn, srcColumn) -> {
-                if (srcColumnName.toLowerCase().equals(srcColumn.toLowerCase())) {
-                    targetColumnNames.add(targetColumn);
-                }
-            });
-            if (!targetColumnNames.isEmpty()) {
-                hasMatched = true;
-                for (String targetColumnName : targetColumnNames) {
-                    updateSql.append(targetColumnName).append("=?, ");
-                    Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
-                    if (type == null) {
-                        throw new RuntimeException("Target column: " + targetColumnName + " not matched");
-                    }
-                    BatchExecutor.setValue(values, type, data.get(srcColumnName));
-                }
-            }
-        }
-        if (!hasMatched) {
-            logger.warn("Did not matched any columns to update ");
-            return;
-        }
-        int len = updateSql.length();
-        updateSql.delete(len - 2, len).append(" WHERE ");
-
-        // Append the primary key condition
-        appendCondition(dbMapping, updateSql, ctype, values, data, old);
-        batchExecutor.execute(updateSql.toString(), values);
-        if (logger.isTraceEnabled()) {
-            logger.trace("Update target table, sql: {}", updateSql);
-        }
-    }
-
-    /**
-     * Delete operation
-     *
-     * @param config
-     * @param dml
-     */
-    private void delete(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
-        Map<String, Object> data = dml.getData();
-        if (data == null || data.isEmpty()) {
-            return;
-        }
-
-        DbMapping dbMapping = config.getDbMapping();
-
-        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
-
-        StringBuilder sql = new StringBuilder();
-        sql.append("DELETE FROM ").append(SyncUtil.getDbTableName(dbMapping)).append(" WHERE ");
-
-        List<Map<String, ?>> values = new ArrayList<>();
-        // Append the primary key condition
-        appendCondition(dbMapping, sql, ctype, values, data);
-        batchExecutor.execute(sql.toString(), values);
-        if (logger.isTraceEnabled()) {
-            logger.trace("Delete from target table, sql: {}", sql);
-        }
-    }
-
-    /**
-     * Truncate operation
-     *
-     * @param config
-     */
-    private void truncate(BatchExecutor batchExecutor, MappingConfig config) throws SQLException {
-        DbMapping dbMapping = config.getDbMapping();
-        StringBuilder sql = new StringBuilder();
-        sql.append("TRUNCATE TABLE ").append(SyncUtil.getDbTableName(dbMapping));
-        batchExecutor.execute(sql.toString(), new ArrayList<>());
-        if (logger.isTraceEnabled()) {
-            logger.trace("Truncate target table, sql: {}", sql);
-        }
-    }
-
-    /**
-     * Get the target table's column types
-     *
-     * @param conn sql connection
-     * @param config mapping config
-     * @return column name to sqlType map
-     */
-    private Map<String, Integer> getTargetColumnType(Connection conn, MappingConfig config) {
-        DbMapping dbMapping = config.getDbMapping();
-        String cacheKey = config.getDestination() + "." + dbMapping.getDatabase() + "." + dbMapping.getTable();
-        Map<String, Integer> columnType = columnsTypeCache.get(cacheKey);
-        if (columnType == null) {
-            synchronized (RdbSyncService.class) {
-                columnType = columnsTypeCache.get(cacheKey);
-                if (columnType == null) {
-                    columnType = new LinkedHashMap<>();
-                    final Map<String, Integer> columnTypeTmp = columnType;
-                    String sql = "SELECT * FROM " + SyncUtil.getDbTableName(dbMapping) + " WHERE 1=2";
-                    Util.sqlRS(conn, sql, rs -> {
-                        try {
-                            ResultSetMetaData rsd = rs.getMetaData();
-                            int columnCount = rsd.getColumnCount();
-                            for (int i = 1; i <= columnCount; i++) {
-                                columnTypeTmp.put(rsd.getColumnName(i).toLowerCase(), rsd.getColumnType(i));
-                            }
-                            columnsTypeCache.put(cacheKey, columnTypeTmp);
-                        } catch (SQLException e) {
-                            logger.error(e.getMessage(), e);
-                        }
-                    });
-                }
-            }
-        }
-        return columnType;
-    }
-
-    /**
-     * Append the primary key WHERE condition
-     */
-    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
-                                 List<Map<String, ?>> values, Map<String, Object> d) {
-        appendCondition(dbMapping, sql, ctype, values, d, null);
-    }
-
-    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
-                                 List<Map<String, ?>> values, Map<String, Object> d, Map<String, Object> o) {
-        // Append the primary key columns
-        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
-            String targetColumnName = entry.getKey();
-            String srcColumnName = entry.getValue();
-            if (srcColumnName == null) {
-                srcColumnName = Util.cleanColumn(targetColumnName);
-            }
-            sql.append(targetColumnName).append("=? AND ");
-            Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
-            if (type == null) {
-                throw new RuntimeException("Target column: " + targetColumnName + " not matched");
-            }
-            // Handle the case where the primary key itself was modified
-            if (o != null && o.containsKey(srcColumnName)) {
-                BatchExecutor.setValue(values, type, o.get(srcColumnName));
-            } else {
-                BatchExecutor.setValue(values, type, d.get(srcColumnName));
-            }
-        }
-        int len = sql.length();
-        sql.delete(len - 4, len);
-    }
-
-    public static class SyncItem {
-
-        private MappingConfig config;
-        private SingleDml     singleDml;
-
-        public SyncItem(MappingConfig config, SingleDml singleDml){
-            this.config = config;
-            this.singleDml = singleDml;
-        }
-    }
-
-    /**
-     * Compute the primary key hash
-     */
-    public int pkHash(DbMapping dbMapping, Map<String, Object> d) {
-        return pkHash(dbMapping, d, null);
-    }
-
-    public int pkHash(DbMapping dbMapping, Map<String, Object> d, Map<String, Object> o) {
-        int hash = 0;
-        // Collect primary key values
-        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
-            String targetColumnName = entry.getKey();
-            String srcColumnName = entry.getValue();
-            if (srcColumnName == null) {
-                srcColumnName = Util.cleanColumn(targetColumnName);
-            }
-            Object value = null;
-            if (o != null && o.containsKey(srcColumnName)) {
-                value = o.get(srcColumnName);
-            } else if (d != null) {
-                value = d.get(srcColumnName);
-            }
-            if (value != null) {
-                hash += value.hashCode();
-            }
-        }
-        hash = Math.abs(hash) % threads;
-        return Math.abs(hash);
-    }
-
-    public void close() {
-        for (int i = 0; i < threads; i++) {
-            executorThreads[i].shutdown();
-        }
-    }
-}
+package com.alibaba.otter.canal.client.adapter.rdb.service;
+
+import java.sql.Connection;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.function.Function;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.serializer.SerializerFeature;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig.DbMapping;
+import com.alibaba.otter.canal.client.adapter.rdb.support.BatchExecutor;
+import com.alibaba.otter.canal.client.adapter.rdb.support.SingleDml;
+import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+
+/**
+ * RDB synchronization service
+ *
+ * @author rewerma 2018-11-7 06:45:49 PM
+ * @version 1.0.0
+ */
+public class RdbSyncService {
+
+    private static final Logger               logger  = LoggerFactory.getLogger(RdbSyncService.class);
+
+    // Column type cache keyed by source table: instance.schema.table -> <columnName, jdbcType>
+    private Map<String, Map<String, Integer>> columnsTypeCache;
+
+    private int                               threads = 3;
+    private boolean                           skipDupException;
+
+    private List<SyncItem>[]                  dmlsPartition;
+    private BatchExecutor[]                   batchExecutors;
+    private ExecutorService[]                 executorThreads;
+
+    public List<SyncItem>[] getDmlsPartition() {
+        return dmlsPartition;
+    }
+
+    public Map<String, Map<String, Integer>> getColumnsTypeCache() {
+        return columnsTypeCache;
+    }
+
+    public RdbSyncService(DataSource dataSource, Integer threads, boolean skipDupException){
+        this(dataSource, threads, new ConcurrentHashMap<>(), skipDupException);
+    }
+
+    @SuppressWarnings("unchecked")
+    public RdbSyncService(DataSource dataSource, Integer threads, Map<String, Map<String, Integer>> columnsTypeCache,
+                          boolean skipDupException){
+        this.columnsTypeCache = columnsTypeCache;
+        this.skipDupException = skipDupException;
+        try {
+            if (threads != null) {
+                this.threads = threads;
+            }
+            this.dmlsPartition = new List[this.threads];
+            this.batchExecutors = new BatchExecutor[this.threads];
+            this.executorThreads = new ExecutorService[this.threads];
+            for (int i = 0; i < this.threads; i++) {
+                dmlsPartition[i] = new ArrayList<>();
+                batchExecutors[i] = new BatchExecutor(dataSource);
+                executorThreads[i] = Executors.newSingleThreadExecutor();
+            }
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Batch sync with a callback
+     *
+     * @param dmls batch of DMLs
+     * @param function callback applied to each DML
+     */
+    public void sync(List<Dml> dmls, Function<Dml, Boolean> function) {
+        try {
+            boolean toExecute = false;
+            for (Dml dml : dmls) {
+                if (!toExecute) {
+                    toExecute = function.apply(dml);
+                } else {
+                    function.apply(dml);
+                }
+            }
+            if (toExecute) {
+                List<Future<Boolean>> futures = new ArrayList<>();
+                for (int i = 0; i < threads; i++) {
+                    int j = i;
+                    futures.add(executorThreads[i].submit(() -> {
+                        try {
+                            dmlsPartition[j]
+                                .forEach(syncItem -> sync(batchExecutors[j], syncItem.config, syncItem.singleDml));
+                            dmlsPartition[j].clear();
+                            batchExecutors[j].commit();
+                            return true;
+                        } catch (Throwable e) {
+                            batchExecutors[j].rollback();
+                            throw new RuntimeException(e);
+                        }
+                    }));
+                }
+
+                futures.forEach(future -> {
+                    try {
+                        future.get();
+                    } catch (ExecutionException | InterruptedException e) {
+                        throw new RuntimeException(e);
+                    }
+                });
+            }
+        } finally {
+            for (BatchExecutor batchExecutor : batchExecutors) {
+                if (batchExecutor != null) {
+                    batchExecutor.close();
+                }
+            }
+        }
+    }
+
+    /**
+     * Batch sync
+     *
+     * @param mappingConfig config map
+     * @param dmls batch of DMLs
+     */
+    public void sync(Map<String, Map<String, MappingConfig>> mappingConfig, List<Dml> dmls, Properties envProperties) {
+        sync(dmls, dml -> {
+            if (dml.getIsDdl() != null && dml.getIsDdl() && StringUtils.isNotEmpty(dml.getSql())) {
+                // DDL
+                columnsTypeCache.remove(dml.getDestination() + "." + dml.getDatabase() + "." + dml.getTable());
+                return false;
+            } else {
+                // DML
+                String destination = StringUtils.trimToEmpty(dml.getDestination());
+                String groupId = StringUtils.trimToEmpty(dml.getGroupId());
+                String database = dml.getDatabase();
+                String table = dml.getTable();
+                Map<String, MappingConfig> configMap;
+                if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) {
+                    configMap = mappingConfig.get(destination + "-" + groupId + "_" + database + "-" + table);
+                } else {
+                    configMap = mappingConfig.get(destination + "_" + database + "-" + table);
+                }
+
+                if (configMap == null) {
+                    return false;
+                }
+
+                if (configMap.values().isEmpty()) {
+                    return false;
+                }
+
+                for (MappingConfig config : configMap.values()) {
+                    if (config.getConcurrent()) {
+                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
+                        singleDmls.forEach(singleDml -> {
+                            int hash = pkHash(config.getDbMapping(), singleDml.getData());
+                            SyncItem syncItem = new SyncItem(config, singleDml);
+                            dmlsPartition[hash].add(syncItem);
+                        });
+                    } else {
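+                        // Serial (non-concurrent) configs are pinned to partition 0 so their
+                        // DMLs stay on a single thread in arrival order.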
+                        int hash = 0;
+                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
+                        singleDmls.forEach(singleDml -> {
+                            SyncItem syncItem = new SyncItem(config, singleDml);
+                            dmlsPartition[hash].add(syncItem);
+                        });
+                    }
+                }
+                return true;
+            }
+        });
+    }
+
+    /**
+     * Sync a single DML
+     *
+     * @param batchExecutor batch transaction executor
+     * @param config corresponding mapping config
+     * @param dml DML
+     */
+    public void sync(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) {
+        if (config != null) {
+            try {
+                String type = dml.getType();
+                if (type != null && type.equalsIgnoreCase("INSERT")) {
+                    insert(batchExecutor, config, dml);
+                } else if (type != null && type.equalsIgnoreCase("UPDATE")) {
+                    update(batchExecutor, config, dml);
+                } else if (type != null && type.equalsIgnoreCase("DELETE")) {
+                    delete(batchExecutor, config, dml);
+                } else if (type != null && type.equalsIgnoreCase("TRUNCATE")) {
+                    truncate(batchExecutor, config);
+                }
+                if (logger.isDebugEnabled()) {
+                    logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue));
+                }
+            } catch (SQLException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+
+    /**
+     * Insert operation
+     *
+     * @param config mapping config
+     * @param dml DML data
+     */
+    private void insert(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
+        Map<String, Object> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        DbMapping dbMapping = config.getDbMapping();
+
+        Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
+
+        StringBuilder insertSql = new StringBuilder();
+        insertSql.append("INSERT INTO ").append(SyncUtil.getDbTableName(dbMapping)).append(" (");
+
+        columnsMap.forEach((targetColumnName, srcColumnName) -> insertSql.append(targetColumnName).append(","));
+        int len = insertSql.length();
+        insertSql.delete(len - 1, len).append(") VALUES (");
+        int mapLen = columnsMap.size();
+        for (int i = 0; i < mapLen; i++) {
+            insertSql.append("?,");
+        }
+        len = insertSql.length();
+        insertSql.delete(len - 1, len).append(")");
+
+        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
+
+        List<Map<String, ?>> values = new ArrayList<>();
+        for (Map.Entry<String, String> entry : columnsMap.entrySet()) {
+            String targetColumnName = entry.getKey();
+            String srcColumnName = entry.getValue();
+            if (srcColumnName == null) {
+                srcColumnName = Util.cleanColumn(targetColumnName);
+            }
+
+            Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
+            if (type == null) {
+                throw new RuntimeException("Target column: " + targetColumnName + " not matched");
+            }
+            Object value = data.get(srcColumnName);
+            BatchExecutor.setValue(values, type, value);
+        }
+
+        try {
+            batchExecutor.execute(insertSql.toString(), values);
+        } catch (SQLException e) {
+            if (skipDupException
+                && (e.getMessage().contains("Duplicate entry") || e.getMessage().startsWith("ORA-00001: 违反唯一约束条件"))) {
+                // ignore
+                // TODO: add duplicate-key error codes for more relational databases
+            } else {
+                throw e;
+            }
+        }
+        if (logger.isTraceEnabled()) {
+            logger.trace("Insert into target table, sql: {}", insertSql);
+        }
+
+    }
+
+    /**
+     * Update operation
+     *
+     * @param config mapping config
+     * @param dml DML data
+     */
+    private void update(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
+        Map<String, Object> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        Map<String, Object> old = dml.getOld();
+        if (old == null || old.isEmpty()) {
+            return;
+        }
+
+        DbMapping dbMapping = config.getDbMapping();
+
+        Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
+
+        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
+
+        StringBuilder updateSql = new StringBuilder();
+        updateSql.append("UPDATE ").append(SyncUtil.getDbTableName(dbMapping)).append(" SET ");
+        List<Map<String, ?>> values = new ArrayList<>();
+        boolean hasMatched = false;
+        for (String srcColumnName : old.keySet()) {
+            List<String> targetColumnNames = new ArrayList<>();
+            columnsMap.forEach((targetColumn, srcColumn) -> {
+                if (srcColumnName.toLowerCase().equals(srcColumn.toLowerCase())) {
+                    targetColumnNames.add(targetColumn);
+                }
+            });
+            if (!targetColumnNames.isEmpty()) {
+                hasMatched = true;
+                for (String targetColumnName : targetColumnNames) {
+                    updateSql.append(targetColumnName).append("=?, ");
+                    Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
+                    if (type == null) {
+                        throw new RuntimeException("Target column: " + targetColumnName + " not matched");
+                    }
+                    BatchExecutor.setValue(values, type, data.get(srcColumnName));
+                }
+            }
+        }
+        if (!hasMatched) {
+            logger.warn("Did not matched any columns to update ");
+            return;
+        }
+        int len = updateSql.length();
+        updateSql.delete(len - 2, len).append(" WHERE ");
+
+        // Append the primary key condition
+        appendCondition(dbMapping, updateSql, ctype, values, data, old);
+        batchExecutor.execute(updateSql.toString(), values);
+        if (logger.isTraceEnabled()) {
+            logger.trace("Update target table, sql: {}", updateSql);
+        }
+    }
+
+    /**
+     * Delete operation
+     *
+     * @param config
+     * @param dml
+     */
+    private void delete(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
+        Map<String, Object> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        DbMapping dbMapping = config.getDbMapping();
+
+        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
+
+        StringBuilder sql = new StringBuilder();
+        sql.append("DELETE FROM ").append(SyncUtil.getDbTableName(dbMapping)).append(" WHERE ");
+
+        List<Map<String, ?>> values = new ArrayList<>();
+        // Append the primary key condition
+        appendCondition(dbMapping, sql, ctype, values, data);
+        batchExecutor.execute(sql.toString(), values);
+        if (logger.isTraceEnabled()) {
+            logger.trace("Delete from target table, sql: {}", sql);
+        }
+    }
+
+    /**
+     * Truncate operation
+     *
+     * @param config
+     */
+    private void truncate(BatchExecutor batchExecutor, MappingConfig config) throws SQLException {
+        DbMapping dbMapping = config.getDbMapping();
+        StringBuilder sql = new StringBuilder();
+        sql.append("TRUNCATE TABLE ").append(SyncUtil.getDbTableName(dbMapping));
+        batchExecutor.execute(sql.toString(), new ArrayList<>());
+        if (logger.isTraceEnabled()) {
+            logger.trace("Truncate target table, sql: {}", sql);
+        }
+    }
+
+    /**
+     * Get the target table's column types
+     *
+     * @param conn sql connection
+     * @param config mapping config
+     * @return column name to sqlType map
+     */
+    private Map<String, Integer> getTargetColumnType(Connection conn, MappingConfig config) {
+        DbMapping dbMapping = config.getDbMapping();
+        String cacheKey = config.getDestination() + "." + dbMapping.getDatabase() + "." + dbMapping.getTable();
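+        // Double-checked locking: probe the cache, re-check under the class-wide lock, then
+        // read metadata via "SELECT * ... WHERE 1=2" (returns the columns but no rows).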
+        Map<String, Integer> columnType = columnsTypeCache.get(cacheKey);
+        if (columnType == null) {
+            synchronized (RdbSyncService.class) {
+                columnType = columnsTypeCache.get(cacheKey);
+                if (columnType == null) {
+                    columnType = new LinkedHashMap<>();
+                    final Map<String, Integer> columnTypeTmp = columnType;
+                    String sql = "SELECT * FROM " + SyncUtil.getDbTableName(dbMapping) + " WHERE 1=2";
+                    Util.sqlRS(conn, sql, rs -> {
+                        try {
+                            ResultSetMetaData rsd = rs.getMetaData();
+                            int columnCount = rsd.getColumnCount();
+                            for (int i = 1; i <= columnCount; i++) {
+                                columnTypeTmp.put(rsd.getColumnName(i).toLowerCase(), rsd.getColumnType(i));
+                            }
+                            columnsTypeCache.put(cacheKey, columnTypeTmp);
+                        } catch (SQLException e) {
+                            logger.error(e.getMessage(), e);
+                        }
+                    });
+                }
+            }
+        }
+        return columnType;
+    }
+
+    /**
+     * Append the primary-key WHERE condition
+     */
+    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
+                                 List<Map<String, ?>> values, Map<String, Object> d) {
+        appendCondition(dbMapping, sql, ctype, values, d, null);
+    }
+
+    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
+                                 List<Map<String, ?>> values, Map<String, Object> d, Map<String, Object> o) {
+        // append the primary-key columns
+        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
+            String targetColumnName = entry.getKey();
+            String srcColumnName = entry.getValue();
+            if (srcColumnName == null) {
+                srcColumnName = Util.cleanColumn(targetColumnName);
+            }
+            sql.append(targetColumnName).append("=? AND ");
+            Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
+            if (type == null) {
+                throw new RuntimeException("Target column: " + targetColumnName + " not matched");
+            }
+            // if the primary key itself was changed, match on the old value
+            if (o != null && o.containsKey(srcColumnName)) {
+                BatchExecutor.setValue(values, type, o.get(srcColumnName));
+            } else {
+                BatchExecutor.setValue(values, type, d.get(srcColumnName));
+            }
+        }
+        int len = sql.length();
+        sql.delete(len - 4, len);
+    }
+
+    public static class SyncItem {
+
+        private MappingConfig config;
+        private SingleDml     singleDml;
+
+        public SyncItem(MappingConfig config, SingleDml singleDml){
+            this.config = config;
+            this.singleDml = singleDml;
+        }
+    }
+
+    /**
+     * Hash the primary key to pick a worker thread
+     */
+    public int pkHash(DbMapping dbMapping, Map<String, Object> d) {
+        return pkHash(dbMapping, d, null);
+    }
+
+    public int pkHash(DbMapping dbMapping, Map<String, Object> d, Map<String, Object> o) {
+        int hash = 0;
+        // collect the primary-key values
+        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
+            String targetColumnName = entry.getKey();
+            String srcColumnName = entry.getValue();
+            if (srcColumnName == null) {
+                srcColumnName = Util.cleanColumn(targetColumnName);
+            }
+            Object value = null;
+            if (o != null && o.containsKey(srcColumnName)) {
+                value = o.get(srcColumnName);
+            } else if (d != null) {
+                value = d.get(srcColumnName);
+            }
+            if (value != null) {
+                hash += value.hashCode();
+            }
+        }
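+        // Math.abs(Integer.MIN_VALUE) is still negative, hence the second abs below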
+        hash = Math.abs(hash) % threads;
+        return Math.abs(hash);
+    }
+
+    public void close() {
+        for (int i = 0; i < threads; i++) {
+            executorThreads[i].shutdown();
+        }
+    }
+}
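
For context: pkHash above shards single-row DML events across the adapter's worker threads, so every change to a given row hashes to the same thread and is applied in order. A minimal dispatch sketch under that assumption (dispatch, dmlsPartition and SingleDml.getOld() are illustrative names not shown in this diff):

    // Route each DML to a fixed worker by primary-key hash; a per-thread
    // queue then preserves per-row ordering (sketch, names assumed).
    void dispatch(MappingConfig config, DbMapping dbMapping, SingleDml dml) {
        int idx = pkHash(dbMapping, dml.getData(), dml.getOld());
        dmlsPartition[idx].add(new SyncItem(config, dml));
    }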

+ 325 - 326
client/src/main/java/com/alibaba/otter/canal/client/kafka/KafkaCanalConnector.java

@@ -1,326 +1,325 @@
-package com.alibaba.otter.canal.client.kafka;
-
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeUnit;
-
-import com.google.common.collect.MapMaker;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.TopicPartition;
-import org.apache.kafka.common.serialization.StringDeserializer;
-
-import com.alibaba.fastjson.JSON;
-import com.alibaba.otter.canal.client.CanalMQConnector;
-import com.alibaba.otter.canal.client.impl.SimpleCanalConnector;
-import com.alibaba.otter.canal.protocol.FlatMessage;
-import com.alibaba.otter.canal.protocol.Message;
-import com.alibaba.otter.canal.protocol.exception.CanalClientException;
-import com.google.common.collect.Lists;
-
-/**
- * canal Kafka data client
- *
- * <pre>
- * Note:
- * 1. Unlike canal's {@linkplain SimpleCanalConnector}, get and ack must not run concurrently here: one thread calls get, finishes in-memory processing and ack, then issues the next get.
- * </pre>
- *
- * @author machengyuan @ 2018-6-12
- * @version 1.1.1
- */
-public class KafkaCanalConnector implements CanalMQConnector {
-
-    protected KafkaConsumer<String, Message> kafkaConsumer;
-    protected KafkaConsumer<String, String>  kafkaConsumer2;                            // 用于扁平message的数据消费
-    protected String                         topic;
-    protected Integer                        partition;
-    protected Properties                     properties;
-    protected volatile boolean               connected      = false;
-    protected volatile boolean               running        = false;
-    protected boolean                        flatMessage;
-
-    private Map<Integer, Long>               currentOffsets = new ConcurrentHashMap<>();
-
-    public KafkaCanalConnector(String servers, String topic, Integer partition, String groupId, Integer batchSize,
-                               boolean flatMessage){
-        this.topic = topic;
-        this.partition = partition;
-        this.flatMessage = flatMessage;
-
-        properties = new Properties();
-        properties.put("bootstrap.servers", servers);
-        properties.put("group.id", groupId);
-        properties.put("enable.auto.commit", false);
-        properties.put("auto.commit.interval.ms", "1000");
-        properties.put("auto.offset.reset", "latest"); // 如果没有offset则从最后的offset开始读
-        properties.put("request.timeout.ms", "40000"); // 必须大于session.timeout.ms的设置
-        properties.put("session.timeout.ms", "30000"); // 默认为30秒
-        if (batchSize == null) {
-            batchSize = 100;
-        }
-        properties.put("max.poll.records", batchSize.toString());
-        properties.put("key.deserializer", StringDeserializer.class.getName());
-        if (!flatMessage) {
-            properties.put("value.deserializer", MessageDeserializer.class.getName());
-        } else {
-            properties.put("value.deserializer", StringDeserializer.class.getName());
-        }
-    }
-
-    /**
-     * Open the connection
-     */
-    @Override
-    public void connect() {
-        if (connected) {
-            return;
-        }
-
-        connected = true;
-        if (kafkaConsumer == null && !flatMessage) {
-            kafkaConsumer = new KafkaConsumer<String, Message>(properties);
-
-        }
-        if (kafkaConsumer2 == null && flatMessage) {
-            kafkaConsumer2 = new KafkaConsumer<String, String>(properties);
-        }
-    }
-
-    /**
-     * Close the connection
-     */
-    @Override
-    public void disconnect() {
-        if (kafkaConsumer != null) {
-            kafkaConsumer.close();
-            kafkaConsumer = null;
-        }
-        if (kafkaConsumer2 != null) {
-            kafkaConsumer2.close();
-            kafkaConsumer2 = null;
-        }
-
-        connected = false;
-    }
-
-    protected void waitClientRunning() {
-        running = true;
-    }
-
-    @Override
-    public boolean checkValid() {
-        return true; // everything passes by default
-    }
-
-    /**
-     * Subscribe to the topic
-     */
-    @Override
-    public void subscribe() {
-        waitClientRunning();
-        if (!running) {
-            return;
-        }
-
-        if (partition == null) {
-            if (kafkaConsumer != null) {
-                kafkaConsumer.subscribe(Collections.singletonList(topic));
-            }
-            if (kafkaConsumer2 != null) {
-                kafkaConsumer2.subscribe(Collections.singletonList(topic));
-            }
-        } else {
-            TopicPartition topicPartition = new TopicPartition(topic, partition);
-            if (kafkaConsumer != null) {
-                kafkaConsumer.assign(Collections.singletonList(topicPartition));
-            }
-            if (kafkaConsumer2 != null) {
-                kafkaConsumer2.assign(Collections.singletonList(topicPartition));
-            }
-        }
-    }
-
-    /**
-     * Unsubscribe
-     */
-    @Override
-    public void unsubscribe() {
-        waitClientRunning();
-        if (!running) {
-            return;
-        }
-
-        if (kafkaConsumer != null) {
-            kafkaConsumer.unsubscribe();
-        }
-        if (kafkaConsumer2 != null) {
-            kafkaConsumer2.unsubscribe();
-        }
-    }
-
-    @Override
-    public List<Message> getList(Long timeout, TimeUnit unit) throws CanalClientException {
-        waitClientRunning();
-        if (!running) {
-            return Lists.newArrayList();
-        }
-
-        List<Message> messages = getListWithoutAck(timeout, unit);
-        if (messages != null && !messages.isEmpty()) {
-            this.ack();
-        }
-        return messages;
-    }
-
-    @Override
-    public List<Message> getListWithoutAck(Long timeout, TimeUnit unit) throws CanalClientException {
-        waitClientRunning();
-        if (!running) {
-            return Lists.newArrayList();
-        }
-
-        ConsumerRecords<String, Message> records = kafkaConsumer.poll(unit.toMillis(timeout));
-
-        currentOffsets.clear();
-        for (TopicPartition topicPartition : records.partitions()) {
-            currentOffsets.put(topicPartition.partition(), kafkaConsumer.position(topicPartition));
-        }
-
-        if (!records.isEmpty()) {
-            List<Message> messages = new ArrayList<>();
-            for (ConsumerRecord<String, Message> record : records) {
-                messages.add(record.value());
-            }
-            return messages;
-        }
-        return Lists.newArrayList();
-    }
-
-    @Override
-    public List<FlatMessage> getFlatList(Long timeout, TimeUnit unit) throws CanalClientException {
-        waitClientRunning();
-        if (!running) {
-            return Lists.newArrayList();
-        }
-
-        List<FlatMessage> messages = getFlatListWithoutAck(timeout, unit);
-        if (messages != null && !messages.isEmpty()) {
-            this.ack();
-        }
-        return messages;
-    }
-
-    @Override
-    public List<FlatMessage> getFlatListWithoutAck(Long timeout, TimeUnit unit) throws CanalClientException {
-        waitClientRunning();
-        if (!running) {
-            return Lists.newArrayList();
-        }
-
-        ConsumerRecords<String, String> records = kafkaConsumer2.poll(unit.toMillis(timeout));
-
-        currentOffsets.clear();
-        for (TopicPartition topicPartition : records.partitions()) {
-            currentOffsets.put(topicPartition.partition(), kafkaConsumer2.position(topicPartition));
-        }
-
-        if (!records.isEmpty()) {
-            List<FlatMessage> flatMessages = new ArrayList<>();
-            for (ConsumerRecord<String, String> record : records) {
-                String flatMessageJson = record.value();
-                FlatMessage flatMessage = JSON.parseObject(flatMessageJson, FlatMessage.class);
-                flatMessages.add(flatMessage);
-            }
-
-            return flatMessages;
-        }
-        return Lists.newArrayList();
-    }
-
-    @Override
-    public void rollback() {
-        waitClientRunning();
-        if (!running) {
-            return;
-        }
-        // roll back all partitions
-        if (kafkaConsumer != null) {
-            for (Map.Entry<Integer, Long> entry : currentOffsets.entrySet()) {
-                kafkaConsumer.seek(new TopicPartition(topic, entry.getKey()), entry.getValue() - 1);
-            }
-        }
-        if (kafkaConsumer2 != null) {
-            for (Map.Entry<Integer, Long> entry : currentOffsets.entrySet()) {
-                kafkaConsumer2.seek(new TopicPartition(topic, entry.getKey()), entry.getValue() - 1);
-            }
-        }
-    }
-
-    /**
-     * Commit the offsets; if no ack happens within session.timeout.ms an exception is thrown and the ack fails
-     */
-    @Override
-    public void ack() {
-        waitClientRunning();
-        if (!running) {
-            return;
-        }
-
-        if (kafkaConsumer != null) {
-            kafkaConsumer.commitSync();
-        }
-        if (kafkaConsumer2 != null) {
-            kafkaConsumer2.commitSync();
-        }
-    }
-
-    @Override
-    public void subscribe(String filter) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    @Override
-    public Message get(int batchSize) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    @Override
-    public Message get(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    @Override
-    public Message getWithoutAck(int batchSize) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    @Override
-    public Message getWithoutAck(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    @Override
-    public void ack(long batchId) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    @Override
-    public void rollback(long batchId) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    /**
-     * Reset the session timeout
-     *
-     * @param timeout new session timeout
-     * @param unit time unit of the timeout
-     */
-    public void setSessionTimeout(Long timeout, TimeUnit unit) {
-        long t = unit.toMillis(timeout);
-        properties.put("request.timeout.ms", String.valueOf(t + 60000));
-        properties.put("session.timeout.ms", String.valueOf(t));
-    }
-
-}
+package com.alibaba.otter.canal.client.kafka;
+
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.TopicPartition;
+import org.apache.kafka.common.serialization.StringDeserializer;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.otter.canal.client.CanalMQConnector;
+import com.alibaba.otter.canal.client.impl.SimpleCanalConnector;
+import com.alibaba.otter.canal.protocol.FlatMessage;
+import com.alibaba.otter.canal.protocol.Message;
+import com.alibaba.otter.canal.protocol.exception.CanalClientException;
+import com.google.common.collect.Lists;
+
+/**
+ * canal Kafka data client
+ *
+ * <pre>
+ * Note:
+ * 1. Unlike canal's {@linkplain SimpleCanalConnector}, get and ack must not run concurrently here: one thread calls get, finishes in-memory processing and ack, then issues the next get.
+ * </pre>
+ *
+ * @author machengyuan @ 2018-6-12
+ * @version 1.1.1
+ */
+public class KafkaCanalConnector implements CanalMQConnector {
+
+    protected KafkaConsumer<String, Message> kafkaConsumer;
+    protected KafkaConsumer<String, String>  kafkaConsumer2;                            // 用于扁平message的数据消费
+    protected String                         topic;
+    protected Integer                        partition;
+    protected Properties                     properties;
+    protected volatile boolean               connected      = false;
+    protected volatile boolean               running        = false;
+    protected boolean                        flatMessage;
+
+    private Map<Integer, Long>               currentOffsets = new ConcurrentHashMap<>();
+
+    public KafkaCanalConnector(String servers, String topic, Integer partition, String groupId, Integer batchSize,
+                               boolean flatMessage){
+        this.topic = topic;
+        this.partition = partition;
+        this.flatMessage = flatMessage;
+
+        properties = new Properties();
+        properties.put("bootstrap.servers", servers);
+        properties.put("group.id", groupId);
+        properties.put("enable.auto.commit", false);
+        properties.put("auto.commit.interval.ms", "1000");
+        properties.put("auto.offset.reset", "latest"); // 如果没有offset则从最后的offset开始读
+        properties.put("request.timeout.ms", "40000"); // 必须大于session.timeout.ms的设置
+        properties.put("session.timeout.ms", "30000"); // 默认为30秒
+        if (batchSize == null) {
+            batchSize = 100;
+        }
+        properties.put("max.poll.records", batchSize.toString());
+        properties.put("key.deserializer", StringDeserializer.class.getName());
+        if (!flatMessage) {
+            properties.put("value.deserializer", MessageDeserializer.class.getName());
+        } else {
+            properties.put("value.deserializer", StringDeserializer.class.getName());
+        }
+    }
+
+    /**
+     * Open the connection
+     */
+    @Override
+    public void connect() {
+        if (connected) {
+            return;
+        }
+
+        connected = true;
+        if (kafkaConsumer == null && !flatMessage) {
+            kafkaConsumer = new KafkaConsumer<String, Message>(properties);
+
+        }
+        if (kafkaConsumer2 == null && flatMessage) {
+            kafkaConsumer2 = new KafkaConsumer<String, String>(properties);
+        }
+    }
+
+    /**
+     * Close the connection
+     */
+    @Override
+    public void disconnect() {
+        if (kafkaConsumer != null) {
+            kafkaConsumer.close();
+            kafkaConsumer = null;
+        }
+        if (kafkaConsumer2 != null) {
+            kafkaConsumer2.close();
+            kafkaConsumer2 = null;
+        }
+
+        connected = false;
+    }
+
+    protected void waitClientRunning() {
+        running = true;
+    }
+
+    @Override
+    public boolean checkValid() {
+        return true; // everything passes by default
+    }
+
+    /**
+     * Subscribe to the topic
+     */
+    @Override
+    public void subscribe() {
+        waitClientRunning();
+        if (!running) {
+            return;
+        }
+
+        if (partition == null) {
+            if (kafkaConsumer != null) {
+                kafkaConsumer.subscribe(Collections.singletonList(topic));
+            }
+            if (kafkaConsumer2 != null) {
+                kafkaConsumer2.subscribe(Collections.singletonList(topic));
+            }
+        } else {
+            TopicPartition topicPartition = new TopicPartition(topic, partition);
+            if (kafkaConsumer != null) {
+                kafkaConsumer.assign(Collections.singletonList(topicPartition));
+            }
+            if (kafkaConsumer2 != null) {
+                kafkaConsumer2.assign(Collections.singletonList(topicPartition));
+            }
+        }
+    }
+
+    /**
+     * Unsubscribe
+     */
+    @Override
+    public void unsubscribe() {
+        waitClientRunning();
+        if (!running) {
+            return;
+        }
+
+        if (kafkaConsumer != null) {
+            kafkaConsumer.unsubscribe();
+        }
+        if (kafkaConsumer2 != null) {
+            kafkaConsumer2.unsubscribe();
+        }
+    }
+
+    @Override
+    public List<Message> getList(Long timeout, TimeUnit unit) throws CanalClientException {
+        waitClientRunning();
+        if (!running) {
+            return Lists.newArrayList();
+        }
+
+        List<Message> messages = getListWithoutAck(timeout, unit);
+        if (messages != null && !messages.isEmpty()) {
+            this.ack();
+        }
+        return messages;
+    }
+
+    @Override
+    public List<Message> getListWithoutAck(Long timeout, TimeUnit unit) throws CanalClientException {
+        waitClientRunning();
+        if (!running) {
+            return Lists.newArrayList();
+        }
+
+        ConsumerRecords<String, Message> records = kafkaConsumer.poll(unit.toMillis(timeout));
+
+        currentOffsets.clear();
+        for (TopicPartition topicPartition : records.partitions()) {
+            currentOffsets.put(topicPartition.partition(), kafkaConsumer.position(topicPartition));
+        }
+
+        if (!records.isEmpty()) {
+            List<Message> messages = new ArrayList<>();
+            for (ConsumerRecord<String, Message> record : records) {
+                messages.add(record.value());
+            }
+            return messages;
+        }
+        return Lists.newArrayList();
+    }
+
+    @Override
+    public List<FlatMessage> getFlatList(Long timeout, TimeUnit unit) throws CanalClientException {
+        waitClientRunning();
+        if (!running) {
+            return Lists.newArrayList();
+        }
+
+        List<FlatMessage> messages = getFlatListWithoutAck(timeout, unit);
+        if (messages != null && !messages.isEmpty()) {
+            this.ack();
+        }
+        return messages;
+    }
+
+    @Override
+    public List<FlatMessage> getFlatListWithoutAck(Long timeout, TimeUnit unit) throws CanalClientException {
+        waitClientRunning();
+        if (!running) {
+            return Lists.newArrayList();
+        }
+
+        ConsumerRecords<String, String> records = kafkaConsumer2.poll(unit.toMillis(timeout));
+
+        currentOffsets.clear();
+        for (TopicPartition topicPartition : records.partitions()) {
+            currentOffsets.put(topicPartition.partition(), kafkaConsumer2.position(topicPartition));
+        }
+
+        if (!records.isEmpty()) {
+            List<FlatMessage> flatMessages = new ArrayList<>();
+            for (ConsumerRecord<String, String> record : records) {
+                String flatMessageJson = record.value();
+                FlatMessage flatMessage = JSON.parseObject(flatMessageJson, FlatMessage.class);
+                flatMessages.add(flatMessage);
+            }
+
+            return flatMessages;
+        }
+        return Lists.newArrayList();
+    }
+
+    @Override
+    public void rollback() {
+        waitClientRunning();
+        if (!running) {
+            return;
+        }
+        // roll back all partitions
+        if (kafkaConsumer != null) {
+            for (Map.Entry<Integer, Long> entry : currentOffsets.entrySet()) {
+                kafkaConsumer.seek(new TopicPartition(topic, entry.getKey()), entry.getValue() - 1);
+            }
+        }
+        if (kafkaConsumer2 != null) {
+            for (Map.Entry<Integer, Long> entry : currentOffsets.entrySet()) {
+                kafkaConsumer2.seek(new TopicPartition(topic, entry.getKey()), entry.getValue() - 1);
+            }
+        }
+    }
+
+    /**
+     * Commit the offsets; if no ack happens within session.timeout.ms an exception is thrown and the ack fails
+     */
+    @Override
+    public void ack() {
+        waitClientRunning();
+        if (!running) {
+            return;
+        }
+
+        if (kafkaConsumer != null) {
+            kafkaConsumer.commitSync();
+        }
+        if (kafkaConsumer2 != null) {
+            kafkaConsumer2.commitSync();
+        }
+    }
+
+    @Override
+    public void subscribe(String filter) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    @Override
+    public Message get(int batchSize) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    @Override
+    public Message get(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    @Override
+    public Message getWithoutAck(int batchSize) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    @Override
+    public Message getWithoutAck(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    @Override
+    public void ack(long batchId) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    @Override
+    public void rollback(long batchId) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    /**
+     * Reset the session timeout
+     *
+     * @param timeout new session timeout
+     * @param unit time unit of the timeout
+     */
+    public void setSessionTimeout(Long timeout, TimeUnit unit) {
+        long t = unit.toMillis(timeout);
+        properties.put("request.timeout.ms", String.valueOf(t + 60000));
+        properties.put("session.timeout.ms", String.valueOf(t));
+    }
+
+}
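
The single-threaded get/ack contract documented on the class translates into a simple poll loop. A minimal sketch (broker address, topic, group id and the handle() callback are placeholders, not part of this diff):

    KafkaCanalConnector connector = new KafkaCanalConnector(
        "127.0.0.1:9092", "example-topic", null, "example-group", 100, true);
    connector.connect();
    connector.subscribe();
    try {
        while (true) {
            List<FlatMessage> messages = connector.getFlatListWithoutAck(100L, TimeUnit.MILLISECONDS);
            for (FlatMessage message : messages) {
                handle(message); // hypothetical application handler
            }
            connector.ack(); // commit only after in-memory processing is done
        }
    } finally {
        connector.disconnect();
    }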

+ 35 - 32
client/src/main/java/com/alibaba/otter/canal/client/kafka/protocol/KafkaFlatMessage.java

@@ -1,32 +1,35 @@
-package com.alibaba.otter.canal.client.kafka.protocol;
-
-import com.alibaba.otter.canal.protocol.FlatMessage;
-import org.springframework.beans.BeanUtils;
-
-/**
- * Message object (Kafka)
- *
- * @Author panjianping
- * @Email ipanjianping@qq.com
- * @Date 2018/12/17
- */
-public class KafkaFlatMessage extends FlatMessage {
-    /**
-     * Kafka message offset
-     */
-    private long offset;
-
-    public KafkaFlatMessage(FlatMessage message, long offset) {
-        super(message.getId());
-        BeanUtils.copyProperties(message, this);
-        this.offset = offset;
-    }
-
-    public long getOffset() {
-        return offset;
-    }
-
-    public void setOffset(long offset) {
-        this.offset = offset;
-    }
-}
+package com.alibaba.otter.canal.client.kafka.protocol;
+
+import com.alibaba.otter.canal.protocol.FlatMessage;
+import org.springframework.beans.BeanUtils;
+
+/**
+ * Message object (Kafka)
+ *
+ * @Author panjianping
+ * @Email ipanjianping@qq.com
+ * @Date 2018/12/17
+ */
+public class KafkaFlatMessage extends FlatMessage {
+
+    private static final long serialVersionUID = 5748024400508080710L;
+
+    /**
+     * Kafka message offset
+     */
+    private long              offset;
+
+    public KafkaFlatMessage(FlatMessage message, long offset){
+        super(message.getId());
+        BeanUtils.copyProperties(message, this);
+        this.offset = offset;
+    }
+
+    public long getOffset() {
+        return offset;
+    }
+
+    public void setOffset(long offset) {
+        this.offset = offset;
+    }
+}
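
KafkaFlatMessage simply decorates a parsed FlatMessage with the Kafka offset it was read from (BeanUtils.copyProperties copies the payload fields). A consumer-side sketch, where record is a hypothetical ConsumerRecord<String, String>:

    // Parse the flat JSON payload and keep its source offset alongside it.
    FlatMessage flat = JSON.parseObject(record.value(), FlatMessage.class);
    KafkaFlatMessage withOffset = new KafkaFlatMessage(flat, record.offset());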

+ 35 - 33
client/src/main/java/com/alibaba/otter/canal/client/kafka/protocol/KafkaMessage.java

@@ -1,33 +1,35 @@
-package com.alibaba.otter.canal.client.kafka.protocol;
-
-import com.alibaba.otter.canal.protocol.Message;
-import org.springframework.beans.BeanUtils;
-
-/**
- * Message object (Kafka)
- *
- * @Author panjianping
- * @Email ipanjianping@qq.com
- * @Date 2018/12/17
- */
-public class KafkaMessage extends Message {
-    /**
-     * Kafka message offset
-     */
-    private long offset;
-
-    public KafkaMessage(Message message, long offset) {
-        super(message.getId());
-        BeanUtils.copyProperties(message, this);
-        this.offset = offset;
-    }
-
-
-    public long getOffset() {
-        return offset;
-    }
-
-    public void setOffset(long offset) {
-        this.offset = offset;
-    }
-}
+package com.alibaba.otter.canal.client.kafka.protocol;
+
+import com.alibaba.otter.canal.protocol.Message;
+import org.springframework.beans.BeanUtils;
+
+/**
+ * Message object (Kafka)
+ *
+ * @Author panjianping
+ * @Email ipanjianping@qq.com
+ * @Date 2018/12/17
+ */
+public class KafkaMessage extends Message {
+
+    private static final long serialVersionUID = -293120358490119447L;
+
+    /**
+     * Kafka message offset
+     */
+    private long              offset;
+
+    public KafkaMessage(Message message, long offset){
+        super(message.getId());
+        BeanUtils.copyProperties(message, this);
+        this.offset = offset;
+    }
+
+    public long getOffset() {
+        return offset;
+    }
+
+    public void setOffset(long offset) {
+        this.offset = offset;
+    }
+}
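
Beyond reformatting, the substantive change in both message wrappers is the explicit serialVersionUID. Without one, the JVM derives a UID from the class structure, so recompiling a changed class can make previously serialized objects fail to deserialize with an InvalidClassException; pinning the value keeps builds wire-compatible. A generic reminder of the pattern (sketch, unrelated to the actual values above):

    // Any Serializable class meant to cross build boundaries should pin its UID.
    public class ExampleMessage implements java.io.Serializable {
        private static final long serialVersionUID = 1L; // arbitrary, but must stay stable
        private long offset;
    }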