瀏覽代碼

Merge pull request #2 from alibaba/master

合并原项目最新代码
bigbro 6 年之前
父節點
當前提交
0c4711ab62
共有 91 個文件被更改，包括 7043 次插入、2801 次删除
  1. 12 1
      client-adapter/common/pom.xml
  2. 6 4
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/OuterAdapter.java
  3. 155 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/YmlConfigBinder.java
  4. 97 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/DefaultPropertyNamePatternsMatcher.java
  5. 31 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/InetAddressEditor.java
  6. 52 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/OriginCapablePropertyValue.java
  7. 27 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PatternPropertyNamePatternsMatcher.java
  8. 356 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertiesConfigurationFactory.java
  9. 38 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertyNamePatternsMatcher.java
  10. 30 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertyOrigin.java
  11. 164 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertySourcesPropertyResolver.java
  12. 233 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertySourcesPropertyValues.java
  13. 127 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedConversionService.java
  14. 729 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedDataBinder.java
  15. 241 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedNames.java
  16. 17 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/StringToCharArrayConverter.java
  17. 203 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/AbstractResource.java
  18. 117 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/ByteArrayResource.java
  19. 107 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/CompositePropertySource.java
  20. 58 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/EnumerablePropertySource.java
  21. 38 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/MapPropertySource.java
  22. 221 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/MutablePropertySources.java
  23. 34 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertiesPropertySource.java
  24. 239 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySource.java
  25. 35 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySourceLoader.java
  26. 25 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySources.java
  27. 57 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/Resource.java
  28. 182 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/SpringProfileDocumentMatcher.java
  29. 419 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/YamlProcessor.java
  30. 87 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/YamlPropertySourceLoader.java
  31. 9 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Dml.java
  32. 15 8
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/JdbcTypeUtil.java
  33. 8 4
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/MessageUtil.java
  34. 1 7
      client-adapter/elasticsearch/pom.xml
  35. 44 20
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/ESAdapter.java
  36. 21 10
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/ESSyncConfig.java
  37. 7 7
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/ESSyncConfigLoader.java
  38. 15 1
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/SchemaItem.java
  39. 9 6
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/SqlParser.java
  40. 66 70
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/service/ESEtlService.java
  41. 73 91
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/service/ESSyncService.java
  42. 107 205
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESTemplate.java
  43. 4 4
      client-adapter/elasticsearch/src/main/resources/es/mytest_user.yml
  44. 1 1
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/ConfigLoadTest.java
  45. 1 1
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/SqlParseTest.java
  46. 1 1
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/Common.java
  47. 0 6
      client-adapter/hbase/pom.xml
  48. 39 12
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/HbaseAdapter.java
  49. 10 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/config/MappingConfig.java
  50. 8 7
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/config/MappingConfigLoader.java
  51. 397 392
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseEtlService.java
  52. 2 1
      client-adapter/hbase/src/main/resources/hbase/mytest_person2.yml
  53. 3 2
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/AbstractCanalAdapterWorker.java
  54. 5 2
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterKafkaWorker.java
  55. 216 198
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterLoader.java
  56. 2 1
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterRocketMQWorker.java
  57. 246 323
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/remote/DbRemoteConfigLoader.java
  58. 56 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/remote/RemoteAdapterMonitorImpl.java
  59. 2 1
      client-adapter/logger/src/main/java/com/alibaba/otter/canal/client/adapter/logger/LoggerAdapterExample.java
  60. 0 7
      client-adapter/rdb/pom.xml
  61. 17 15
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/RdbAdapter.java
  62. 51 50
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/ConfigLoader.java
  63. 10 0
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/MappingConfig.java
  64. 1 1
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/MirrorDbConfig.java
  65. 169 170
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/monitor/RdbConfigMonitor.java
  66. 101 91
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbEtlService.java
  67. 37 29
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbMirrorDbSyncService.java
  68. 506 494
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java
  69. 10 9
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/support/BatchExecutor.java
  70. 2 2
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/support/SyncUtil.java
  71. 5 3
      client-adapter/rdb/src/main/resources/rdb/mytest_user.yml
  72. 1 1
      client-adapter/rdb/src/test/java/com/alibaba/otter/canal/client/adapter/rdb/test/ConfigLoadTest.java
  73. 1 1
      client-adapter/rdb/src/test/java/com/alibaba/otter/canal/client/adapter/rdb/test/sync/Common.java
  74. 325 326
      client/src/main/java/com/alibaba/otter/canal/client/kafka/KafkaCanalConnector.java
  75. 35 32
      client/src/main/java/com/alibaba/otter/canal/client/kafka/protocol/KafkaFlatMessage.java
  76. 35 33
      client/src/main/java/com/alibaba/otter/canal/client/kafka/protocol/KafkaMessage.java
  77. 53 0
      common/src/main/java/com/alibaba/otter/canal/common/utils/CommonUtils.java
  78. 1 0
      deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalConstants.java
  79. 11 1
      deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalStater.java
  80. 2 71
      deployer/src/main/java/com/alibaba/otter/canal/deployer/monitor/remote/DbRemoteConfigLoader.java
  81. 55 0
      deployer/src/main/java/com/alibaba/otter/canal/deployer/monitor/remote/RemoteInstanceMonitorImpl.java
  82. 2 1
      deployer/src/main/resources/canal.properties
  83. 6 0
      filter/src/test/java/com/alibaba/otter/canal/filter/AviaterFilterTest.java
  84. 3 1
      instance/manager/src/main/java/com/alibaba/otter/canal/instance/manager/CanalInstanceWithManager.java
  85. 11 2
      instance/manager/src/main/java/com/alibaba/otter/canal/instance/manager/model/CanalParameter.java
  86. 7 8
      parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlConnection.java
  87. 35 45
      server/src/main/java/com/alibaba/otter/canal/common/MQMessageUtils.java
  88. 27 16
      server/src/main/java/com/alibaba/otter/canal/common/MQProperties.java
  89. 6 1
      server/src/main/java/com/alibaba/otter/canal/kafka/CanalKafkaProducer.java
  90. 12 4
      server/src/main/java/com/alibaba/otter/canal/server/CanalMQStarter.java
  91. 1 1
      sink/src/main/java/com/alibaba/otter/canal/sink/entry/group/GroupEventSink.java

+ 12 - 1
client-adapter/common/pom.xml

@@ -1,5 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
         <artifactId>canal.client-adapter</artifactId>
         <groupId>com.alibaba.otter</groupId>
@@ -26,6 +27,16 @@
             <artifactId>druid</artifactId>
             <version>1.1.9</version>
         </dependency>
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-context</artifactId>
+            <version>5.0.5.RELEASE</version>
+        </dependency>
+        <dependency>
+            <groupId>org.yaml</groupId>
+            <artifactId>snakeyaml</artifactId>
+            <version>1.19</version>
+        </dependency>
     </dependencies>
 
 </project>

+ 6 - 4
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/OuterAdapter.java

@@ -2,6 +2,7 @@ package com.alibaba.otter.canal.client.adapter;
 
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 
 import com.alibaba.otter.canal.client.adapter.support.Dml;
 import com.alibaba.otter.canal.client.adapter.support.EtlResult;
@@ -21,8 +22,9 @@ public interface OuterAdapter {
      * 外部适配器初始化接口
      *
      * @param configuration 外部适配器配置信息
+     * @param envProperties 环境变量的配置属性
      */
-    void init(OuterAdapterConfig configuration);
+    void init(OuterAdapterConfig configuration, Properties envProperties);
 
     /**
      * 往适配器中同步数据
@@ -38,7 +40,7 @@ public interface OuterAdapter {
 
     /**
      * Etl操作
-     * 
+     *
      * @param task 任务名, 对应配置名
      * @param params etl筛选条件
      */
@@ -48,7 +50,7 @@ public interface OuterAdapter {
 
     /**
      * 计算总数
-     * 
+     *
      * @param task 任务名, 对应配置名
      * @return 总数
      */
@@ -58,7 +60,7 @@ public interface OuterAdapter {
 
     /**
      * 通过task获取对应的destination
-     * 
+     *
      * @param task 任务名, 对应配置名
      * @return destination
      */

+ 155 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/YmlConfigBinder.java

@@ -0,0 +1,155 @@
+package com.alibaba.otter.canal.client.adapter.config;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import org.springframework.util.PropertyPlaceholderHelper;
+import org.springframework.util.StringUtils;
+
+import com.alibaba.otter.canal.client.adapter.config.bind.PropertiesConfigurationFactory;
+import com.alibaba.otter.canal.client.adapter.config.common.*;
+
+/**
+ * 将yaml内容绑定到指定对象, 遵循spring yml的绑定规范
+ *
+ * @author reweerma 2019-2-1 上午9:14:02
+ * @version 1.0.0
+ */
+public class YmlConfigBinder {
+
+    /**
+     * 将当前内容绑定到指定对象
+     *
+     * @param content yml内容
+     * @param clazz 指定对象类型
+     * @return 对象
+     */
+    public static <T> T bindYmlToObj(String content, Class<T> clazz) {
+        return bindYmlToObj(null, content, clazz, null);
+    }
+
+    /**
+     * 将当前内容绑定到指定对象并指定内容编码格式
+     *
+     * @param content yml内容
+     * @param clazz 指定对象类型
+     * @param charset yml内容编码格式
+     * @return 对象
+     */
+    public static <T> T bindYmlToObj(String content, Class<T> clazz, String charset) {
+        return bindYmlToObj(null, content, clazz, charset);
+    }
+
+    /**
+     * 将当前内容指定前缀部分绑定到指定对象
+     *
+     * @param prefix 指定前缀
+     * @param content yml内容
+     * @param clazz 指定对象类型
+     * @return 对象
+     */
+    public static <T> T bindYmlToObj(String prefix, String content, Class<T> clazz) {
+        return bindYmlToObj(prefix, content, clazz, null);
+    }
+
+    /**
+     * 将当前内容指定前缀部分绑定到指定对象并指定内容编码格式
+     *
+     * @param prefix 指定前缀
+     * @param content yml内容
+     * @param clazz 指定对象类型
+     * @param charset yml内容编码格式
+     * @return 对象
+     */
+    public static <T> T bindYmlToObj(String prefix, String content, Class<T> clazz, String charset) {
+        return bindYmlToObj(prefix, content, clazz, charset, null);
+    }
+
+    /**
+     * 将当前内容指定前缀部分绑定到指定对象并用环境变量中的属性替换占位符, 例: 当前内容有属性 zkServers: ${zookeeper.servers}
+     * 在 baseProperties 中有属性 zookeeper.servers:
+     * 192.168.0.1:2181,192.168.0.1:2181,192.168.0.1:2181 则当前内容 zkServers 会被替换为
+     * zkServers: 192.168.0.1:2181,192.168.0.1:2181,192.168.0.1:2181 注: 假设绑定的类中
+     * zkServers 属性是 List<String> 对象, 则会自动映射成List
+     *
+     * @param prefix 指定前缀
+     * @param content yml内容
+     * @param clazz 指定对象类型
+     * @param charset yml内容编码格式
+     * @param baseProperties 环境变量的配置属性, 用于替换内容中的占位符
+     * @return 对象
+     */
+    public static <T> T bindYmlToObj(String prefix, String content, Class<T> clazz, String charset,
+                                     Properties baseProperties) {
+        try {
+            byte[] contentBytes;
+            if (charset == null) {
+                contentBytes = content.getBytes();
+            } else {
+                contentBytes = content.getBytes(charset);
+            }
+            YamlPropertySourceLoader propertySourceLoader = new YamlPropertySourceLoader();
+            Resource configResource = new ByteArrayResource(contentBytes);
+            PropertySource<?> propertySource = propertySourceLoader.load("manualBindConfig", configResource, null);
+
+            if (propertySource == null) {
+                return null;
+            }
+
+            Properties properties = new Properties();
+            Map<String, Object> propertiesRes = new LinkedHashMap<>();
+            if (!StringUtils.isEmpty(prefix) && !prefix.endsWith(".")) {
+                prefix = prefix + ".";
+            }
+
+            properties.putAll((Map<?, ?>) propertySource.getSource());
+
+            if (baseProperties != null) {
+                baseProperties.putAll(properties);
+                properties = baseProperties;
+            }
+
+            for (Map.Entry<?, ?> entry : ((Map<?, ?>) propertySource.getSource()).entrySet()) {
+                String key = (String) entry.getKey();
+                Object value = entry.getValue();
+
+                if (prefix != null) {
+                    if (key != null && key.startsWith(prefix)) {
+                        key = key.substring(prefix.length());
+                    } else {
+                        continue;
+                    }
+                }
+
+                if (value instanceof String && ((String) value).contains("${") && ((String) value).contains("}")) {
+                    PropertyPlaceholderHelper propertyPlaceholderHelper = new PropertyPlaceholderHelper("${", "}");
+                    value = propertyPlaceholderHelper.replacePlaceholders((String) value, properties);
+                }
+
+                propertiesRes.put(key, value);
+            }
+
+            if (propertiesRes.isEmpty()) {
+                return null;
+            }
+
+            propertySource = new MapPropertySource(propertySource.getName(), propertiesRes);
+
+            T target = clazz.newInstance();
+
+            MutablePropertySources propertySources = new MutablePropertySources();
+            propertySources.addFirst(propertySource);
+
+            PropertiesConfigurationFactory<Object> factory = new PropertiesConfigurationFactory<Object>(target);
+            factory.setPropertySources(propertySources);
+            factory.setIgnoreInvalidFields(true);
+            factory.setIgnoreUnknownFields(true);
+
+            factory.bindPropertiesToTarget();
+
+            return target;
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+}

+ 97 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/DefaultPropertyNamePatternsMatcher.java

@@ -0,0 +1,97 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * {@link PropertyNamePatternsMatcher} that matches when a property name exactly
+ * matches one of the given names, or starts with one of the given names
+ * followed by a delimiter. This implementation is optimized for frequent calls.
+ *
+ * @author Phillip Webb
+ * @since 1.2.0
+ */
+class DefaultPropertyNamePatternsMatcher implements PropertyNamePatternsMatcher {
+
+    private final char[]   delimiters;
+
+    private final boolean  ignoreCase;
+
+    private final String[] names;
+
+    protected DefaultPropertyNamePatternsMatcher(char[] delimiters, String... names){
+        this(delimiters, false, names);
+    }
+
+    protected DefaultPropertyNamePatternsMatcher(char[] delimiters, boolean ignoreCase, String... names){
+        this(delimiters, ignoreCase, new HashSet<String>(Arrays.asList(names)));
+    }
+
+    DefaultPropertyNamePatternsMatcher(char[] delimiters, boolean ignoreCase, Set<String> names){
+        this.delimiters = delimiters;
+        this.ignoreCase = ignoreCase;
+        this.names = names.toArray(new String[names.size()]);
+    }
+
+    @Override
+    public boolean matches(String propertyName) {
+        char[] propertyNameChars = propertyName.toCharArray();
+        boolean[] match = new boolean[this.names.length];
+        boolean noneMatched = true;
+        for (int i = 0; i < this.names.length; i++) {
+            if (this.names[i].length() <= propertyNameChars.length) {
+                match[i] = true;
+                noneMatched = false;
+            }
+        }
+        if (noneMatched) {
+            return false;
+        }
+        for (int charIndex = 0; charIndex < propertyNameChars.length; charIndex++) {
+            for (int nameIndex = 0; nameIndex < this.names.length; nameIndex++) {
+                if (match[nameIndex]) {
+                    match[nameIndex] = false;
+                    if (charIndex < this.names[nameIndex].length()) {
+                        if (isCharMatch(this.names[nameIndex].charAt(charIndex), propertyNameChars[charIndex])) {
+                            match[nameIndex] = true;
+                            noneMatched = false;
+                        }
+                    } else {
+                        char charAfter = propertyNameChars[this.names[nameIndex].length()];
+                        if (isDelimiter(charAfter)) {
+                            match[nameIndex] = true;
+                            noneMatched = false;
+                        }
+                    }
+                }
+            }
+            if (noneMatched) {
+                return false;
+            }
+        }
+        for (int i = 0; i < match.length; i++) {
+            if (match[i]) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private boolean isCharMatch(char c1, char c2) {
+        if (this.ignoreCase) {
+            return Character.toLowerCase(c1) == Character.toLowerCase(c2);
+        }
+        return c1 == c2;
+    }
+
+    private boolean isDelimiter(char c) {
+        for (char delimiter : this.delimiters) {
+            if (c == delimiter) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+}

+ 31 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/InetAddressEditor.java

@@ -0,0 +1,31 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import java.beans.PropertyEditorSupport;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+/**
+ * {@link java.beans.PropertyEditor} for {@link InetAddress} objects: parses a
+ * host name or IP address string into an {@link InetAddress}, and renders an
+ * {@link InetAddress} back as its host-address text.
+ *
+ * @author Phillip Webb
+ * @since 1.2.0
+ */
+class InetAddressEditor extends PropertyEditorSupport {
+
+    @Override
+    public String getAsText() {
+        return ((InetAddress) getValue()).getHostAddress();
+    }
+
+    @Override
+    public void setAsText(String text) throws IllegalArgumentException {
+        try {
+            setValue(InetAddress.getByName(text));
+        } catch (UnknownHostException ex) {
+            throw new IllegalArgumentException("Cannot locate host", ex);
+        }
+    }
+
+}

+ 52 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/OriginCapablePropertyValue.java

@@ -0,0 +1,52 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import org.springframework.beans.PropertyValue;
+
+import com.alibaba.otter.canal.client.adapter.config.common.PropertySource;
+
+/**
+ * A {@link PropertyValue} that can provide information about its origin.
+ *
+ * @author Andy Wilkinson
+ */
+class OriginCapablePropertyValue extends PropertyValue {
+
+    private static final String  ATTRIBUTE_PROPERTY_ORIGIN = "propertyOrigin";
+
+    private final PropertyOrigin origin;
+
+    OriginCapablePropertyValue(PropertyValue propertyValue){
+        this(propertyValue.getName(),
+            propertyValue.getValue(),
+            (PropertyOrigin) propertyValue.getAttribute(ATTRIBUTE_PROPERTY_ORIGIN));
+    }
+
+    OriginCapablePropertyValue(String name, Object value, String originName, PropertySource<?> originSource){
+        this(name, value, new PropertyOrigin(originSource, originName));
+    }
+
+    OriginCapablePropertyValue(String name, Object value, PropertyOrigin origin){
+        super(name, value);
+        this.origin = origin;
+        setAttribute(ATTRIBUTE_PROPERTY_ORIGIN, origin);
+    }
+
+    public PropertyOrigin getOrigin() {
+        return this.origin;
+    }
+
+    @Override
+    public String toString() {
+        String name = (this.origin != null ? this.origin.getName() : this.getName());
+        String source = (this.origin.getSource() != null ? this.origin.getSource().getName() : "unknown");
+        return "'" + name + "' from '" + source + "'";
+    }
+
+    public static PropertyOrigin getOrigin(PropertyValue propertyValue) {
+        if (propertyValue instanceof OriginCapablePropertyValue) {
+            return ((OriginCapablePropertyValue) propertyValue).getOrigin();
+        }
+        return new OriginCapablePropertyValue(propertyValue).getOrigin();
+    }
+
+}

+ 27 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PatternPropertyNamePatternsMatcher.java

@@ -0,0 +1,27 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import java.util.Collection;
+
+import org.springframework.util.PatternMatchUtils;
+
+/**
+ * {@link PropertyNamePatternsMatcher} that delegates to
+ * {@link PatternMatchUtils#simpleMatch(String[], String)}.
+ *
+ * @author Phillip Webb
+ * @since 1.2.0
+ */
+class PatternPropertyNamePatternsMatcher implements PropertyNamePatternsMatcher {
+
+    private final String[] patterns;
+
+    PatternPropertyNamePatternsMatcher(Collection<String> patterns){
+        this.patterns = (patterns != null ? patterns.toArray(new String[patterns.size()]) : new String[] {});
+    }
+
+    @Override
+    public boolean matches(String propertyName) {
+        return PatternMatchUtils.simpleMatch(this.patterns, propertyName);
+    }
+
+}

+ 356 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertiesConfigurationFactory.java

@@ -0,0 +1,356 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import java.beans.PropertyDescriptor;
+import java.util.*;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.springframework.beans.BeanUtils;
+import org.springframework.beans.PropertyValues;
+import org.springframework.beans.factory.FactoryBean;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.beans.support.ResourceEditorRegistrar;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.context.MessageSource;
+import org.springframework.context.MessageSourceAware;
+import org.springframework.core.convert.ConversionService;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+import org.springframework.validation.*;
+
+import com.alibaba.otter.canal.client.adapter.config.common.PropertySources;
+
+/**
+ * Validate some {@link Properties} (or optionally
+ * {@link org.springframework.core.env.PropertySources}) by binding them to an
+ * object of a specified type and then optionally running a {@link Validator}
+ * over it.
+ *
+ * @param <T> the target type
+ * @author Dave Syer
+ */
+public class PropertiesConfigurationFactory<T> implements FactoryBean<T>, ApplicationContextAware, MessageSourceAware, InitializingBean {
+
+    private static final char[] EXACT_DELIMITERS       = { '_', '.', '[' };
+
+    private static final char[] TARGET_NAME_DELIMITERS = { '_', '.' };
+
+    private static final Log    logger                 = LogFactory.getLog(PropertiesConfigurationFactory.class);
+
+    private boolean             ignoreUnknownFields    = true;
+
+    private boolean             ignoreInvalidFields;
+
+    private boolean             exceptionIfInvalid     = true;
+
+    private PropertySources     propertySources;
+
+    private final T             target;
+
+    private Validator           validator;
+
+    private ApplicationContext  applicationContext;
+
+    private MessageSource       messageSource;
+
+    private boolean             hasBeenBound           = false;
+
+    private boolean             ignoreNestedProperties = false;
+
+    private String              targetName;
+
+    private ConversionService   conversionService;
+
+    private boolean             resolvePlaceholders    = true;
+
+    /**
+     * Create a new {@link PropertiesConfigurationFactory} instance.
+     *
+     * @param target the target object to bind too
+     * @see #PropertiesConfigurationFactory(Class)
+     */
+    public PropertiesConfigurationFactory(T target){
+        Assert.notNull(target, "target must not be null");
+        this.target = target;
+    }
+
+    /**
+     * Create a new {@link PropertiesConfigurationFactory} instance.
+     *
+     * @param type the target type
+     * @see #PropertiesConfigurationFactory(Class)
+     */
+    @SuppressWarnings("unchecked")
+    public PropertiesConfigurationFactory(Class<?> type){
+        Assert.notNull(type, "type must not be null");
+        this.target = (T) BeanUtils.instantiate(type);
+    }
+
+    /**
+     * Flag to disable binding of nested properties (i.e. those with period
+     * separators in their paths). Can be useful to disable this if the name prefix
+     * is empty and you don't want to ignore unknown fields.
+     *
+     * @param ignoreNestedProperties the flag to set (default false)
+     */
+    public void setIgnoreNestedProperties(boolean ignoreNestedProperties) {
+        this.ignoreNestedProperties = ignoreNestedProperties;
+    }
+
+    /**
+     * Set whether to ignore unknown fields, that is, whether to ignore bind
+     * parameters that do not have corresponding fields in the target object.
+     * <p>
+     * Default is "true". Turn this off to enforce that all bind parameters must
+     * have a matching field in the target object.
+     *
+     * @param ignoreUnknownFields if unknown fields should be ignored
+     */
+    public void setIgnoreUnknownFields(boolean ignoreUnknownFields) {
+        this.ignoreUnknownFields = ignoreUnknownFields;
+    }
+
+    /**
+     * Set whether to ignore invalid fields, that is, whether to ignore bind
+     * parameters that have corresponding fields in the target object which are not
+     * accessible (for example because of null values in the nested path).
+     * <p>
+     * Default is "false". Turn this on to ignore bind parameters for nested objects
+     * in non-existing parts of the target object graph.
+     *
+     * @param ignoreInvalidFields if invalid fields should be ignored
+     */
+    public void setIgnoreInvalidFields(boolean ignoreInvalidFields) {
+        this.ignoreInvalidFields = ignoreInvalidFields;
+    }
+
+    /**
+     * Set the target name.
+     *
+     * @param targetName the target name
+     */
+    public void setTargetName(String targetName) {
+        this.targetName = targetName;
+    }
+
+    @Override
+    public void setApplicationContext(ApplicationContext applicationContext) {
+        this.applicationContext = applicationContext;
+    }
+
+    /**
+     * Set the message source.
+     *
+     * @param messageSource the message source
+     */
+    @Override
+    public void setMessageSource(MessageSource messageSource) {
+        this.messageSource = messageSource;
+    }
+
+    /**
+     * Set the property sources.
+     *
+     * @param propertySources the property sources
+     */
+    public void setPropertySources(PropertySources propertySources) {
+        this.propertySources = propertySources;
+    }
+
+    /**
+     * Set the conversion service. Optional; when set it is installed on the data
+     * binder before binding.
+     *
+     * @param conversionService the conversion service
+     */
+    public void setConversionService(ConversionService conversionService) {
+        this.conversionService = conversionService;
+    }
+
+    /**
+     * Set the validator. Applied after binding if it supports the target type.
+     *
+     * @param validator the validator
+     */
+    public void setValidator(Validator validator) {
+        this.validator = validator;
+    }
+
+    /**
+     * Set a flag to indicate that an exception should be raised if a Validator is
+     * available and validation fails. When {@code false}, failures are logged and
+     * swallowed instead of being rethrown.
+     *
+     * @param exceptionIfInvalid the flag to set
+     * @deprecated as of 1.5, do not specify a {@link Validator} if validation
+     * should not occur
+     */
+    @Deprecated
+    public void setExceptionIfInvalid(boolean exceptionIfInvalid) {
+        this.exceptionIfInvalid = exceptionIfInvalid;
+    }
+
+    /**
+     * Flag to indicate that placeholders should be replaced during binding. Default
+     * is true. Forwarded to the PropertySourcesPropertyValues built for binding.
+     *
+     * @param resolvePlaceholders flag value
+     */
+    public void setResolvePlaceholders(boolean resolvePlaceholders) {
+        this.resolvePlaceholders = resolvePlaceholders;
+    }
+
+    @Override
+    public void afterPropertiesSet() throws Exception {
+        // Bind eagerly when the container initializes this factory bean.
+        bindPropertiesToTarget();
+    }
+
+    @Override
+    public Class<?> getObjectType() {
+        // Before a target is set the concrete type is unknown; advertise Object.
+        return (this.target != null ? this.target.getClass() : Object.class);
+    }
+
+    @Override
+    public boolean isSingleton() {
+        // A single bound instance is produced per factory.
+        return true;
+    }
+
+    @Override
+    public T getObject() throws Exception {
+        // Bind lazily on first access if afterPropertiesSet() has not run yet.
+        if (!this.hasBeenBound) {
+            bindPropertiesToTarget();
+        }
+        return this.target;
+    }
+
+    /**
+     * Bind the configured {@link PropertySources} onto the target bean.
+     *
+     * @throws BindException if binding fails and {@code exceptionIfInvalid} is set
+     */
+    public void bindPropertiesToTarget() throws BindException {
+        Assert.state(this.propertySources != null, "PropertySources should not be null");
+        try {
+            if (logger.isTraceEnabled()) {
+                logger.trace("Property Sources: " + this.propertySources);
+
+            }
+            this.hasBeenBound = true;
+            doBindPropertiesToTarget();
+        } catch (BindException ex) {
+            if (this.exceptionIfInvalid) {
+                throw ex;
+            }
+            // Lenient mode: record the failure but leave the target usable.
+            logger.error("Failed to load Properties validation bean. " + "Your Properties may be invalid.", ex);
+        }
+    }
+
+    // Configure a RelaxedDataBinder, compute the candidate (relaxed) property
+    // names, bind the property values, then validate and report errors.
+    private void doBindPropertiesToTarget() throws BindException {
+        // Scope the binder to the target name (prefix) when one was supplied.
+        RelaxedDataBinder dataBinder = (this.targetName != null ? new RelaxedDataBinder(this.target,
+            this.targetName) : new RelaxedDataBinder(this.target));
+        if (this.validator != null && this.validator.supports(dataBinder.getTarget().getClass())) {
+            dataBinder.setValidator(this.validator);
+        }
+        if (this.conversionService != null) {
+            dataBinder.setConversionService(this.conversionService);
+        }
+        // Never truncate bound collections/arrays.
+        dataBinder.setAutoGrowCollectionLimit(Integer.MAX_VALUE);
+        dataBinder.setIgnoreNestedProperties(this.ignoreNestedProperties);
+        dataBinder.setIgnoreInvalidFields(this.ignoreInvalidFields);
+        dataBinder.setIgnoreUnknownFields(this.ignoreUnknownFields);
+        customizeBinder(dataBinder);
+        if (this.applicationContext != null) {
+            // Register Resource editors so property values can reference resources.
+            ResourceEditorRegistrar resourceEditorRegistrar = new ResourceEditorRegistrar(this.applicationContext,
+                this.applicationContext.getEnvironment());
+            resourceEditorRegistrar.registerCustomEditors(dataBinder);
+        }
+        Iterable<String> relaxedTargetNames = getRelaxedTargetNames();
+        Set<String> names = getNames(relaxedTargetNames);
+        PropertyValues propertyValues = getPropertySourcesPropertyValues(names, relaxedTargetNames);
+        dataBinder.bind(propertyValues);
+        if (this.validator != null) {
+            dataBinder.validate();
+        }
+        checkForBindingErrors(dataBinder);
+    }
+
+    // Relaxed variants of the target name, or null when there is no usable prefix.
+    private Iterable<String> getRelaxedTargetNames() {
+        if (this.target == null || !StringUtils.hasLength(this.targetName)) {
+            return null;
+        }
+        return new RelaxedNames(this.targetName);
+    }
+
+    // Collect every relaxed variant of the target's bean property names, optionally
+    // qualified by each candidate prefix ("prefix.name" and "prefix_name").
+    private Set<String> getNames(Iterable<String> prefixes) {
+        Set<String> names = new LinkedHashSet<String>();
+        if (this.target == null) {
+            return names;
+        }
+        for (PropertyDescriptor descriptor : BeanUtils.getPropertyDescriptors(this.target.getClass())) {
+            String name = descriptor.getName();
+            if (name.equals("class")) {
+                continue;
+            }
+            RelaxedNames relaxedNames = RelaxedNames.forCamelCase(name);
+            if (prefixes == null) {
+                for (String relaxedName : relaxedNames) {
+                    names.add(relaxedName);
+                }
+                continue;
+            }
+            for (String prefix : prefixes) {
+                for (String relaxedName : relaxedNames) {
+                    names.add(prefix + "." + relaxedName);
+                    names.add(prefix + "_" + relaxedName);
+                }
+            }
+        }
+        return names;
+    }
+
+    // Expose the PropertySources as Spring PropertyValues, pre-filtered by name.
+    private PropertyValues getPropertySourcesPropertyValues(Set<String> names, Iterable<String> relaxedTargetNames) {
+        PropertyNamePatternsMatcher includes = getPropertyNamePatternsMatcher(names, relaxedTargetNames);
+        return new PropertySourcesPropertyValues(this.propertySources, names, includes, this.resolvePlaceholders);
+    }
+
+    // Choose the cheapest name filter that is still safe for the current settings.
+    private PropertyNamePatternsMatcher getPropertyNamePatternsMatcher(Set<String> names,
+                                                                       Iterable<String> relaxedTargetNames) {
+        if (this.ignoreUnknownFields && !isMapTarget()) {
+            // Since unknown fields are ignored we can filter them out early to save
+            // unnecessary calls to the PropertySource.
+            return new DefaultPropertyNamePatternsMatcher(EXACT_DELIMITERS, true, names);
+        }
+        if (relaxedTargetNames != null) {
+            // We can filter properties to those starting with the target name, but
+            // we can't do a complete filter since we need to trigger the
+            // unknown fields check
+            Set<String> relaxedNames = new HashSet<String>();
+            for (String relaxedTargetName : relaxedTargetNames) {
+                relaxedNames.add(relaxedTargetName);
+            }
+            return new DefaultPropertyNamePatternsMatcher(TARGET_NAME_DELIMITERS, true, relaxedNames);
+        }
+        // Not ideal, we basically can't filter anything
+        return PropertyNamePatternsMatcher.ALL;
+    }
+
+    private boolean isMapTarget() {
+        // instanceof is null-safe, matching the original null + assignability check.
+        return this.target instanceof Map;
+    }
+
+    // Log every binding/validation error, then fail hard only when configured to.
+    private void checkForBindingErrors(RelaxedDataBinder dataBinder) throws BindException {
+        BindingResult errors = dataBinder.getBindingResult();
+        if (errors.hasErrors()) {
+            logger.error("Properties configuration failed validation");
+            for (ObjectError error : errors.getAllErrors()) {
+                // Prefer the resolved message (with the raw error appended) when a
+                // MessageSource is available.
+                logger.error(this.messageSource != null ? this.messageSource.getMessage(error, Locale.getDefault())
+                                                          + " (" + error + ")" : error);
+            }
+            if (this.exceptionIfInvalid) {
+                throw new BindException(errors);
+            }
+        }
+    }
+
+    /**
+     * Customize the data binder. No-op by default; subclasses may override to
+     * register extra editors or tweak binder settings before binding happens.
+     *
+     * @param dataBinder the data binder that will be used to bind and validate
+     */
+    protected void customizeBinder(DataBinder dataBinder) {
+    }
+}

+ 38 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertyNamePatternsMatcher.java

@@ -0,0 +1,38 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+/**
+ * Strategy interface used to check if a property name matches specific
+ * criteria.
+ *
+ * @author Phillip Webb
+ * @since 1.2.0
+ */
+interface PropertyNamePatternsMatcher {
+
+    /** Matcher that accepts every property name (no filtering). */
+    PropertyNamePatternsMatcher ALL  = new PropertyNamePatternsMatcher() {
+
+                                         @Override
+                                         public boolean matches(String propertyName) {
+                                             return true;
+                                         }
+
+                                     };
+
+    /** Matcher that rejects every property name. */
+    PropertyNamePatternsMatcher NONE = new PropertyNamePatternsMatcher() {
+
+                                         @Override
+                                         public boolean matches(String propertyName) {
+                                             return false;
+                                         }
+
+                                     };
+
+    /**
+     * Return {@code true} if the property name matches.
+     *
+     * @param propertyName the property name
+     * @return {@code true} if the property name matches
+     */
+    boolean matches(String propertyName);
+
+}

+ 30 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertyOrigin.java

@@ -0,0 +1,30 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import com.alibaba.otter.canal.client.adapter.config.common.PropertySource;
+
+/**
+ * The origin of a property, specifically its source and its name before any
+ * prefix was removed.
+ *
+ * @author Andy Wilkinson
+ * @since 1.3.0
+ */
+public class PropertyOrigin {
+
+    // The PropertySource the value was read from.
+    private final PropertySource<?> source;
+
+    // The property name as it appeared in the source (before prefix stripping).
+    private final String            name;
+
+    // Package-private: instances are created by the binding infrastructure only.
+    PropertyOrigin(PropertySource<?> source, String name){
+        this.name = name;
+        this.source = source;
+    }
+
+    public PropertySource<?> getSource() {
+        return this.source;
+    }
+
+    public String getName() {
+        return this.name;
+    }
+}

+ 164 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertySourcesPropertyResolver.java

@@ -0,0 +1,164 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import org.springframework.core.convert.ConversionException;
+import org.springframework.core.env.AbstractEnvironment;
+import org.springframework.core.env.AbstractPropertyResolver;
+import org.springframework.core.env.PropertyResolver;
+import org.springframework.util.ClassUtils;
+
+import com.alibaba.otter.canal.client.adapter.config.common.PropertySource;
+import com.alibaba.otter.canal.client.adapter.config.common.PropertySources;
+
+/**
+ * {@link PropertyResolver} implementation that resolves property values against
+ * an underlying set of {@link PropertySources}.
+ *
+ * @author Chris Beams
+ * @author Juergen Hoeller
+ * @see PropertySource
+ * @see PropertySources
+ * @see AbstractEnvironment
+ * @since 3.1
+ */
+public class PropertySourcesPropertyResolver extends AbstractPropertyResolver {
+
+    private final PropertySources propertySources;
+
+    /**
+     * Create a new resolver against the given property sources.
+     *
+     * @param propertySources the set of {@link PropertySource} objects to use
+     */
+    public PropertySourcesPropertyResolver(PropertySources propertySources){
+        this.propertySources = propertySources;
+    }
+
+    @Override
+    public boolean containsProperty(String key) {
+        if (this.propertySources != null) {
+            for (PropertySource<?> propertySource : this.propertySources) {
+                if (propertySource.containsProperty(key)) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public String getProperty(String key) {
+        return getProperty(key, String.class, true);
+    }
+
+    @Override
+    public <T> T getProperty(String key, Class<T> targetValueType) {
+        return getProperty(key, targetValueType, true);
+    }
+
+    @Override
+    protected String getPropertyAsRawString(String key) {
+        // "Raw" access: nested ${...} placeholders are deliberately not resolved.
+        return getProperty(key, String.class, false);
+    }
+
+    // Sources are consulted in iteration order; the first non-null value wins.
+    protected <T> T getProperty(String key, Class<T> targetValueType, boolean resolveNestedPlaceholders) {
+        if (this.propertySources != null) {
+            for (PropertySource<?> propertySource : this.propertySources) {
+                if (logger.isTraceEnabled()) {
+                    logger
+                        .trace("Searching for key '" + key + "' in PropertySource '" + propertySource.getName() + "'");
+                }
+                Object value = propertySource.getProperty(key);
+                if (value != null) {
+                    if (resolveNestedPlaceholders && value instanceof String) {
+                        value = resolveNestedPlaceholders((String) value);
+                    }
+                    logKeyFound(key, propertySource, value);
+                    return convertValueIfNecessary(value, targetValueType);
+                }
+            }
+        }
+        if (logger.isDebugEnabled()) {
+            logger.debug("Could not find key '" + key + "' in any property source");
+        }
+        return null;
+    }
+
+    // Resolve the value of a key to a Class, accepting a fully qualified class
+    // name (String), a Class instance, or any other value (its runtime class).
+    @Deprecated
+    public <T> Class<T> getPropertyAsClass(String key, Class<T> targetValueType) {
+        if (this.propertySources != null) {
+            for (PropertySource<?> propertySource : this.propertySources) {
+                if (logger.isTraceEnabled()) {
+                    logger.trace(String.format("Searching for key '%s' in [%s]", key, propertySource.getName()));
+                }
+                Object value = propertySource.getProperty(key);
+                if (value != null) {
+                    logKeyFound(key, propertySource, value);
+                    Class<?> clazz;
+                    if (value instanceof String) {
+                        try {
+                            clazz = ClassUtils.forName((String) value, null);
+                        } catch (Exception ex) {
+                            throw new PropertySourcesPropertyResolver.ClassConversionException((String) value,
+                                targetValueType,
+                                ex);
+                        }
+                    } else if (value instanceof Class) {
+                        clazz = (Class<?>) value;
+                    } else {
+                        clazz = value.getClass();
+                    }
+                    if (!targetValueType.isAssignableFrom(clazz)) {
+                        throw new PropertySourcesPropertyResolver.ClassConversionException(clazz, targetValueType);
+                    }
+                    @SuppressWarnings("unchecked")
+                    Class<T> targetClass = (Class<T>) clazz;
+                    return targetClass;
+                }
+            }
+        }
+        if (logger.isDebugEnabled()) {
+            logger.debug(String.format("Could not find key '%s' in any property source", key));
+        }
+        return null;
+    }
+
+    /**
+     * Log the given key as found in the given {@link PropertySource}, resulting in
+     * the given value.
+     * <p>
+     * The default implementation writes a debug log message with key and source. As
+     * of 4.3.3, this does not log the value anymore in order to avoid accidental
+     * logging of sensitive settings. Subclasses may override this method to change
+     * the log level and/or log message, including the property's value if desired.
+     *
+     * @param key the key found
+     * @param propertySource the {@code PropertySource} that the key has been found
+     *     in
+     * @param value the corresponding value
+     * @since 4.3.1
+     */
+    protected void logKeyFound(String key, PropertySource<?> propertySource, Object value) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("Found key '" + key + "' in PropertySource '" + propertySource.getName()
+                         + "' with value of type " + value.getClass().getSimpleName());
+        }
+    }
+
+    // Thrown by getPropertyAsClass when a value cannot be loaded/assigned as the
+    // expected class.
+    @SuppressWarnings("serial")
+    @Deprecated
+    private static class ClassConversionException extends ConversionException {
+
+        public ClassConversionException(Class<?> actual, Class<?> expected){
+            super(String
+                .format("Actual type %s is not assignable to expected type %s", actual.getName(), expected.getName()));
+        }
+
+        public ClassConversionException(String actual, Class<?> expected, Exception ex){
+            super(
+                String
+                    .format("Could not find/load class %s during attempt to convert to %s", actual, expected.getName()),
+                ex);
+        }
+    }
+
+}

+ 233 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertySourcesPropertyValues.java

@@ -0,0 +1,233 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.regex.Pattern;
+
+import org.springframework.beans.MutablePropertyValues;
+import org.springframework.beans.PropertyValue;
+import org.springframework.beans.PropertyValues;
+import org.springframework.util.Assert;
+import org.springframework.validation.DataBinder;
+
+import com.alibaba.otter.canal.client.adapter.config.common.CompositePropertySource;
+import com.alibaba.otter.canal.client.adapter.config.common.EnumerablePropertySource;
+import com.alibaba.otter.canal.client.adapter.config.common.PropertySource;
+import com.alibaba.otter.canal.client.adapter.config.common.PropertySources;
+
+/**
+ * A {@link PropertyValues} implementation backed by a {@link PropertySources},
+ * bridging the two abstractions and allowing (for instance) a regular
+ * {@link DataBinder} to be used with the latter.
+ *
+ * @author Dave Syer
+ * @author Phillip Webb
+ */
+public class PropertySourcesPropertyValues implements PropertyValues {
+
+    private static final Pattern                               COLLECTION_PROPERTY = Pattern
+        .compile("\\[(\\d+)\\](\\.\\S+)?");
+
+    private final PropertySources                              propertySources;
+
+    private final Collection<String>                           nonEnumerableFallbackNames;
+
+    private final PropertyNamePatternsMatcher                  includes;
+
+    private final Map<String, PropertyValue>                   propertyValues      = new LinkedHashMap<String, PropertyValue>();
+
+    private final ConcurrentHashMap<String, PropertySource<?>> collectionOwners    = new ConcurrentHashMap<String, PropertySource<?>>();
+
+    private final boolean                                      resolvePlaceholders;
+
+    /**
+     * Create a new PropertyValues from the given PropertySources.
+     *
+     * @param propertySources a PropertySources instance
+     */
+    public PropertySourcesPropertyValues(PropertySources propertySources){
+        this(propertySources, true);
+    }
+
+    /**
+     * Create a new PropertyValues from the given PropertySources that will
+     * optionally resolve placeholders.
+     *
+     * @param propertySources a PropertySources instance
+     * @param resolvePlaceholders {@code true} if placeholders should be resolved.
+     * @since 1.5.2
+     */
+    public PropertySourcesPropertyValues(PropertySources propertySources, boolean resolvePlaceholders){
+        this(propertySources, (Collection<String>) null, PropertyNamePatternsMatcher.ALL, resolvePlaceholders);
+    }
+
+    /**
+     * Create a new PropertyValues from the given PropertySources.
+     *
+     * @param propertySources a PropertySources instance
+     * @param includePatterns property name patterns to include from system
+     *     properties and environment variables
+     * @param nonEnumerableFallbackNames the property names to try in lieu of an
+     *     {@link EnumerablePropertySource}.
+     */
+    public PropertySourcesPropertyValues(PropertySources propertySources, Collection<String> includePatterns,
+                                         Collection<String> nonEnumerableFallbackNames){
+        this(propertySources,
+            nonEnumerableFallbackNames,
+            new PatternPropertyNamePatternsMatcher(includePatterns),
+            true);
+    }
+
+    /**
+     * Create a new PropertyValues from the given PropertySources.
+     *
+     * @param propertySources a PropertySources instance
+     * @param nonEnumerableFallbackNames the property names to try in lieu of an
+     *     {@link EnumerablePropertySource}.
+     * @param includes the property name patterns to include
+     * @param resolvePlaceholders flag to indicate the placeholders should be
+     *     resolved
+     */
+    PropertySourcesPropertyValues(PropertySources propertySources, Collection<String> nonEnumerableFallbackNames,
+                                  PropertyNamePatternsMatcher includes, boolean resolvePlaceholders){
+        Assert.notNull(propertySources, "PropertySources must not be null");
+        Assert.notNull(includes, "Includes must not be null");
+        this.propertySources = propertySources;
+        this.nonEnumerableFallbackNames = nonEnumerableFallbackNames;
+        this.includes = includes;
+        this.resolvePlaceholders = resolvePlaceholders;
+        PropertySourcesPropertyResolver resolver = new PropertySourcesPropertyResolver(propertySources);
+        for (PropertySource<?> source : propertySources) {
+            processPropertySource(source, resolver);
+        }
+    }
+
+    private void processPropertySource(PropertySource<?> source, PropertySourcesPropertyResolver resolver) {
+        if (source instanceof CompositePropertySource) {
+            processCompositePropertySource((CompositePropertySource) source, resolver);
+        } else if (source instanceof EnumerablePropertySource) {
+            processEnumerablePropertySource((EnumerablePropertySource<?>) source, resolver, this.includes);
+        } else {
+            processNonEnumerablePropertySource(source, resolver);
+        }
+    }
+
+    private void processCompositePropertySource(CompositePropertySource source,
+                                                PropertySourcesPropertyResolver resolver) {
+        for (PropertySource<?> nested : source.getPropertySources()) {
+            processPropertySource(nested, resolver);
+        }
+    }
+
+    private void processEnumerablePropertySource(EnumerablePropertySource<?> source,
+                                                 PropertySourcesPropertyResolver resolver,
+                                                 PropertyNamePatternsMatcher includes) {
+        if (source.getPropertyNames().length > 0) {
+            for (String propertyName : source.getPropertyNames()) {
+                if (includes.matches(propertyName)) {
+                    Object value = getEnumerableProperty(source, resolver, propertyName);
+                    putIfAbsent(propertyName, value, source);
+                }
+            }
+        }
+    }
+
+    private Object getEnumerableProperty(EnumerablePropertySource<?> source, PropertySourcesPropertyResolver resolver,
+                                         String propertyName) {
+        try {
+            if (this.resolvePlaceholders) {
+                return resolver.getProperty(propertyName, Object.class);
+            }
+        } catch (RuntimeException ex) {
+            // Probably could not resolve placeholders, ignore it here
+        }
+        return source.getProperty(propertyName);
+    }
+
+    private void processNonEnumerablePropertySource(PropertySource<?> source,
+                                                    PropertySourcesPropertyResolver resolver) {
+        // We can only do exact matches for non-enumerable property names, but
+        // that's better than nothing...
+        if (this.nonEnumerableFallbackNames == null) {
+            return;
+        }
+        for (String propertyName : this.nonEnumerableFallbackNames) {
+            if (!source.containsProperty(propertyName)) {
+                continue;
+            }
+            Object value = null;
+            try {
+                value = resolver.getProperty(propertyName, Object.class);
+            } catch (RuntimeException ex) {
+                // Probably could not convert to Object, weird, but ignorable
+            }
+            if (value == null) {
+                value = source.getProperty(propertyName.toUpperCase(Locale.ENGLISH));
+            }
+            putIfAbsent(propertyName, value, source);
+        }
+    }
+
+    @Override
+    public PropertyValue[] getPropertyValues() {
+        Collection<PropertyValue> values = this.propertyValues.values();
+        return values.toArray(new PropertyValue[values.size()]);
+    }
+
+    @Override
+    public PropertyValue getPropertyValue(String propertyName) {
+        PropertyValue propertyValue = this.propertyValues.get(propertyName);
+        if (propertyValue != null) {
+            return propertyValue;
+        }
+        for (PropertySource<?> source : this.propertySources) {
+            Object value = source.getProperty(propertyName);
+            propertyValue = putIfAbsent(propertyName, value, source);
+            if (propertyValue != null) {
+                return propertyValue;
+            }
+        }
+        return null;
+    }
+
+    private PropertyValue putIfAbsent(String propertyName, Object value, PropertySource<?> source) {
+        if (value != null && !this.propertyValues.containsKey(propertyName)) {
+            PropertySource<?> collectionOwner = this.collectionOwners
+                .putIfAbsent(COLLECTION_PROPERTY.matcher(propertyName).replaceAll("[]"), source);
+            if (collectionOwner == null || collectionOwner == source) {
+                PropertyValue propertyValue = new OriginCapablePropertyValue(propertyName, value, propertyName, source);
+                this.propertyValues.put(propertyName, propertyValue);
+                return propertyValue;
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public PropertyValues changesSince(PropertyValues old) {
+        MutablePropertyValues changes = new MutablePropertyValues();
+        // for each property value in the new set
+        for (PropertyValue newValue : getPropertyValues()) {
+            // if there wasn't an old one, add it
+            PropertyValue oldValue = old.getPropertyValue(newValue.getName());
+            if (oldValue == null || !oldValue.equals(newValue)) {
+                changes.addPropertyValue(newValue);
+            }
+        }
+        return changes;
+    }
+
+    @Override
+    public boolean contains(String propertyName) {
+        return getPropertyValue(propertyName) != null;
+    }
+
+    @Override
+    public boolean isEmpty() {
+        return this.propertyValues.isEmpty();
+    }
+
+}

+ 127 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedConversionService.java

@@ -0,0 +1,127 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import java.util.EnumSet;
+import java.util.Locale;
+import java.util.Set;
+
+import org.springframework.core.convert.ConversionFailedException;
+import org.springframework.core.convert.ConversionService;
+import org.springframework.core.convert.TypeDescriptor;
+import org.springframework.core.convert.converter.Converter;
+import org.springframework.core.convert.converter.ConverterFactory;
+import org.springframework.core.convert.support.DefaultConversionService;
+import org.springframework.core.convert.support.GenericConversionService;
+import org.springframework.util.Assert;
+
+/**
+ * Internal {@link ConversionService} used by {@link RelaxedDataBinder} to
+ * support additional relaxed conversion.
+ *
+ * @author Phillip Webb
+ * @author Stephane Nicoll
+ * @since 1.1.0
+ */
+class RelaxedConversionService implements ConversionService {
+
+    // Optional delegate, consulted first; may be null.
+    private final ConversionService        conversionService;
+
+    // Relaxed extras: collection converters, case-insensitive enums, char[].
+    private final GenericConversionService additionalConverters;
+
+    /**
+     * Create a new {@link RelaxedConversionService} instance.
+     *
+     * @param conversionService an optional root conversion service, may be
+     *     {@code null}
+     */
+    RelaxedConversionService(ConversionService conversionService){
+        this.conversionService = conversionService;
+        this.additionalConverters = new GenericConversionService();
+        DefaultConversionService.addCollectionConverters(this.additionalConverters);
+        this.additionalConverters
+            .addConverterFactory(new RelaxedConversionService.StringToEnumIgnoringCaseConverterFactory());
+        this.additionalConverters.addConverter(new StringToCharArrayConverter());
+    }
+
+    @Override
+    public boolean canConvert(Class<?> sourceType, Class<?> targetType) {
+        return (this.conversionService != null && this.conversionService.canConvert(sourceType, targetType))
+               || this.additionalConverters.canConvert(sourceType, targetType);
+    }
+
+    @Override
+    public boolean canConvert(TypeDescriptor sourceType, TypeDescriptor targetType) {
+        return (this.conversionService != null && this.conversionService.canConvert(sourceType, targetType))
+               || this.additionalConverters.canConvert(sourceType, targetType);
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public <T> T convert(Object source, Class<T> targetType) {
+        Assert.notNull(targetType, "The targetType to convert to cannot be null");
+        return (T) convert(source, TypeDescriptor.forObject(source), TypeDescriptor.valueOf(targetType));
+    }
+
+    // Delegate first; fall back to the relaxed converters on conversion failure.
+    @Override
+    public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
+        if (this.conversionService != null) {
+            try {
+                return this.conversionService.convert(source, sourceType, targetType);
+            } catch (ConversionFailedException ex) {
+                // Ignore and try the additional converters
+            }
+        }
+        return this.additionalConverters.convert(source, sourceType, targetType);
+    }
+
+    /**
+     * Clone of Spring's package private StringToEnumConverterFactory, but ignoring
+     * the case of the source.
+     */
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    private static class StringToEnumIgnoringCaseConverterFactory implements ConverterFactory<String, Enum> {
+
+        @Override
+        public <T extends Enum> Converter<String, T> getConverter(Class<T> targetType) {
+            // Walk up to the declaring enum class (handles constants with bodies).
+            Class<?> enumType = targetType;
+            while (enumType != null && !enumType.isEnum()) {
+                enumType = enumType.getSuperclass();
+            }
+            Assert.notNull(enumType, "The target type " + targetType.getName() + " does not refer to an enum");
+            return new RelaxedConversionService.StringToEnumIgnoringCaseConverterFactory.StringToEnum(enumType);
+        }
+
+        private class StringToEnum<T extends Enum> implements Converter<String, T> {
+
+            private final Class<T> enumType;
+
+            StringToEnum(Class<T> enumType){
+                this.enumType = enumType;
+            }
+
+            @Override
+            public T convert(String source) {
+                if (source.isEmpty()) {
+                    // It's an empty enum identifier: reset the enum value to null.
+                    return null;
+                }
+                source = source.trim();
+                // Try the relaxed variants of each constant name, then a plain
+                // case-insensitive match.
+                for (T candidate : (Set<T>) EnumSet.allOf(this.enumType)) {
+                    RelaxedNames names = new RelaxedNames(
+                        candidate.name().replace('_', '-').toLowerCase(Locale.ENGLISH));
+                    for (String name : names) {
+                        if (name.equals(source)) {
+                            return candidate;
+                        }
+                    }
+                    if (candidate.name().equalsIgnoreCase(source)) {
+                        return candidate;
+                    }
+                }
+                throw new IllegalArgumentException(
+                    "No enum constant " + this.enumType.getCanonicalName() + "." + source);
+            }
+
+        }
+
+    }
+
+}

+ 729 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedDataBinder.java

@@ -0,0 +1,729 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import java.beans.PropertyEditor;
+import java.net.InetAddress;
+import java.util.*;
+
+import org.springframework.beans.*;
+import org.springframework.beans.propertyeditors.FileEditor;
+import org.springframework.core.convert.ConversionService;
+import org.springframework.core.convert.TypeDescriptor;
+import org.springframework.core.env.StandardEnvironment;
+import org.springframework.util.LinkedMultiValueMap;
+import org.springframework.util.MultiValueMap;
+import org.springframework.util.StringUtils;
+import org.springframework.validation.AbstractPropertyBindingResult;
+import org.springframework.validation.BeanPropertyBindingResult;
+import org.springframework.validation.DataBinder;
+
+/**
+ * Binder implementation that allows caller to bind to maps and also allows
+ * property names to match a bit loosely (if underscores or dashes are removed
+ * and replaced with camel case for example).
+ *
+ * @author Dave Syer
+ * @author Phillip Webb
+ * @author Stephane Nicoll
+ * @author Andy Wilkinson
+ * @see RelaxedNames
+ */
+public class RelaxedDataBinder extends DataBinder {
+
+    private static final Set<Class<?>> EXCLUDED_EDITORS;
+
+    static {
+        Set<Class<?>> excluded = new HashSet<Class<?>>();
+        excluded.add(FileEditor.class);
+        EXCLUDED_EDITORS = Collections.unmodifiableSet(excluded);
+    }
+
+    private static final Object           BLANK       = new Object();
+
+    private String                        namePrefix;
+
+    private boolean                       ignoreNestedProperties;
+
+    private MultiValueMap<String, String> nameAliases = new LinkedMultiValueMap<String, String>();
+
+    /**
+     * Create a new {@link RelaxedDataBinder} instance.
+     *
+     * @param target the target into which properties are bound
+     */
+    public RelaxedDataBinder(Object target){
+        super(wrapTarget(target));
+    }
+
+    /**
+     * Create a new {@link RelaxedDataBinder} instance.
+     *
+     * @param target the target into which properties are bound
+     * @param namePrefix An optional prefix to be used when reading properties
+     */
+    public RelaxedDataBinder(Object target, String namePrefix){
+        super(wrapTarget(target), (StringUtils.hasLength(namePrefix) ? namePrefix : DEFAULT_OBJECT_NAME));
+        this.namePrefix = cleanNamePrefix(namePrefix);
+    }
+
+    private String cleanNamePrefix(String namePrefix) {
+        if (!StringUtils.hasLength(namePrefix)) {
+            return null;
+        }
+        return (namePrefix.endsWith(".") ? namePrefix : namePrefix + ".");
+    }
+
+    /**
+     * Flag to disable binding of nested properties (i.e. those with period
+     * separators in their paths). Can be useful to disable this if the name prefix
+     * is empty and you don't want to ignore unknown fields.
+     *
+     * @param ignoreNestedProperties the flag to set (default false)
+     */
+    public void setIgnoreNestedProperties(boolean ignoreNestedProperties) {
+        this.ignoreNestedProperties = ignoreNestedProperties;
+    }
+
+    /**
+     * Set name aliases.
+     *
+     * @param aliases a map of property name to aliases
+     */
+    public void setNameAliases(Map<String, List<String>> aliases) {
+        this.nameAliases = new LinkedMultiValueMap<String, String>(aliases);
+    }
+
+    /**
+     * Add aliases to the {@link DataBinder}.
+     *
+     * @param name the property name to alias
+     * @param alias aliases for the property names
+     * @return this instance
+     */
+    public RelaxedDataBinder withAlias(String name, String... alias) {
+        for (String value : alias) {
+            this.nameAliases.add(name, value);
+        }
+        return this;
+    }
+
+    /**
+     * Pre-processes the incoming values (prefix stripping, path
+     * normalization, map auto-creation) before delegating to the standard
+     * {@link DataBinder} binding logic.
+     */
+    @Override
+    protected void doBind(MutablePropertyValues propertyValues) {
+        super.doBind(modifyProperties(propertyValues, getTarget()));
+    }
+
+    /**
+     * Modify the property values so that period separated property paths are valid
+     * for map keys. Also creates new maps for properties of map type that are null
+     * (assuming all maps are potentially nested). The standard bracket {@code[...]}
+     * dereferencing is also accepted.
+     *
+     * @param propertyValues the property values
+     * @param target the target object
+     * @return modified property values
+     */
+    private MutablePropertyValues modifyProperties(MutablePropertyValues propertyValues, Object target) {
+        propertyValues = getPropertyValuesForNamePrefix(propertyValues);
+        if (target instanceof RelaxedDataBinder.MapHolder) {
+            // Map targets were wrapped in a MapHolder, so re-root every
+            // property path under its "map" bean property.
+            propertyValues = addMapPrefix(propertyValues);
+        }
+        BeanWrapper wrapper = new BeanWrapperImpl(target);
+        wrapper.setConversionService(new RelaxedConversionService(getConversionService()));
+        wrapper.setAutoGrowNestedPaths(true);
+        List<PropertyValue> sortedValues = new ArrayList<PropertyValue>();
+        Set<String> modifiedNames = new HashSet<String>();
+        List<String> sortedNames = getSortedPropertyNames(propertyValues);
+        for (String name : sortedNames) {
+            PropertyValue propertyValue = propertyValues.getPropertyValue(name);
+            PropertyValue modifiedProperty = modifyProperty(wrapper, propertyValue);
+            // Keep only the first value seen for each normalized name.
+            if (modifiedNames.add(modifiedProperty.getName())) {
+                sortedValues.add(modifiedProperty);
+            }
+        }
+        return new MutablePropertyValues(sortedValues);
+    }
+
+    private List<String> getSortedPropertyNames(MutablePropertyValues propertyValues) {
+        List<String> names = new LinkedList<String>();
+        for (PropertyValue propertyValue : propertyValues.getPropertyValueList()) {
+            names.add(propertyValue.getName());
+        }
+        sortPropertyNames(names);
+        return names;
+    }
+
+    /**
+     * Sort by name so that parent properties get processed first (e.g. 'foo.bar'
+     * before 'foo.bar.spam'). Don't use Collections.sort() because the order might
+     * be significant for other property names (it shouldn't be but who knows what
+     * people might be relying on, e.g. HSQL has a JDBCXADataSource where
+     * "databaseName" is a synonym for "url").
+     *
+     * @param names the names to sort
+     */
+    private void sortPropertyNames(List<String> names) {
+        for (String name : new ArrayList<String>(names)) {
+            int propertyIndex = names.indexOf(name);
+            RelaxedDataBinder.BeanPath path = new RelaxedDataBinder.BeanPath(name);
+            for (String prefix : path.prefixes()) {
+                int prefixIndex = names.indexOf(prefix);
+                if (prefixIndex >= propertyIndex) {
+                    // The child property has a parent in the list in the wrong order
+                    // Move the child so it sits immediately after its parent.
+                    names.remove(name);
+                    names.add(prefixIndex, name);
+                }
+            }
+        }
+    }
+
+    private MutablePropertyValues addMapPrefix(MutablePropertyValues propertyValues) {
+        MutablePropertyValues rtn = new MutablePropertyValues();
+        for (PropertyValue pv : propertyValues.getPropertyValues()) {
+            rtn.add("map." + pv.getName(), pv.getValue());
+        }
+        return rtn;
+    }
+
+    /**
+     * Strips the configured name prefix (in any relaxed form, using '.' or
+     * '_' as the separator) from each property name, and optionally drops
+     * nested properties when {@code ignoreNestedProperties} is set.
+     *
+     * @param propertyValues the raw property values
+     * @return the filtered values, or the original instance when no prefix is
+     *     configured and nested properties are allowed
+     */
+    private MutablePropertyValues getPropertyValuesForNamePrefix(MutablePropertyValues propertyValues) {
+        if (!StringUtils.hasText(this.namePrefix) && !this.ignoreNestedProperties) {
+            return propertyValues;
+        }
+        MutablePropertyValues rtn = new MutablePropertyValues();
+        for (PropertyValue value : propertyValues.getPropertyValues()) {
+            String name = value.getName();
+            for (String prefix : new RelaxedNames(stripLastDot(this.namePrefix))) {
+                for (String separator : new String[] { ".", "_" }) {
+                    String candidate = (StringUtils.hasLength(prefix) ? prefix + separator : prefix);
+                    if (name.startsWith(candidate)) {
+                        name = name.substring(candidate.length());
+                        if (!(this.ignoreNestedProperties && name.contains("."))) {
+                            // Preserve the origin so binding failures can
+                            // still report where the value came from.
+                            PropertyOrigin propertyOrigin = OriginCapablePropertyValue.getOrigin(value);
+                            rtn.addPropertyValue(
+                                new OriginCapablePropertyValue(name, value.getValue(), propertyOrigin));
+                        }
+                    }
+                }
+            }
+        }
+        return rtn;
+    }
+
+    private String stripLastDot(String string) {
+        if (StringUtils.hasLength(string) && string.endsWith(".")) {
+            string = string.substring(0, string.length() - 1);
+        }
+        return string;
+    }
+
+    /**
+     * Rewrites a property path into canonical BeanWrapper syntax, returning
+     * the original {@link PropertyValue} unchanged when no rewrite is needed.
+     */
+    private PropertyValue modifyProperty(BeanWrapper target, PropertyValue propertyValue) {
+        String name = propertyValue.getName();
+        String normalizedName = normalizePath(target, name);
+        if (!normalizedName.equals(name)) {
+            return new PropertyValue(normalizedName, propertyValue.getValue());
+        }
+        return propertyValue;
+    }
+
+    /**
+     * Normalize a bean property path to a format understood by a BeanWrapper. This
+     * is used so that
+     * <ul>
+     * <li>Fuzzy matching can be employed for bean property names</li>
+     * <li>Period separators can be used instead of indexing ([...]) for map
+     * keys</li>
+     * </ul>
+     *
+     * @param wrapper a bean wrapper for the object to bind
+     * @param path the bean path to bind
+     * @return a transformed path with correct bean wrapper syntax
+     */
+    protected String normalizePath(BeanWrapper wrapper, String path) {
+        return initializePath(wrapper, new RelaxedDataBinder.BeanPath(path), 0);
+    }
+
+    @Override
+    protected AbstractPropertyBindingResult createBeanPropertyBindingResult() {
+        return new RelaxedDataBinder.RelaxedBeanPropertyBindingResult(getTarget(),
+            getObjectName(),
+            isAutoGrowNestedPaths(),
+            getAutoGrowCollectionLimit(),
+            getConversionService());
+    }
+
+    /**
+     * Recursively walks the path one node at a time: renames each property
+     * node to the actual bean property name, converts period-separated
+     * segments into map/collection index notation, and creates intermediate
+     * maps/collections so that auto-growing can succeed.
+     *
+     * @param wrapper wrapper around the bind target
+     * @param path the (mutated in place) path being normalized
+     * @param index index of the node to process
+     * @return the fully normalized path
+     */
+    private String initializePath(BeanWrapper wrapper, RelaxedDataBinder.BeanPath path, int index) {
+        String prefix = path.prefix(index);
+        String key = path.name(index);
+        if (path.isProperty(index)) {
+            key = getActualPropertyName(wrapper, prefix, key);
+            path.rename(index, key);
+        }
+        if (path.name(++index) == null) {
+            // Reached the end of the path.
+            return path.toString();
+        }
+        String name = path.prefix(index);
+        TypeDescriptor descriptor = wrapper.getPropertyTypeDescriptor(name);
+        if (descriptor == null || descriptor.isMap()) {
+            if (isMapValueStringType(descriptor) || isBlanked(wrapper, name, path.name(index))) {
+                // The remainder of the path is really a single map key:
+                // fold it into one node.
+                path.collapseKeys(index);
+            }
+            path.mapIndex(index);
+            extendMapIfNecessary(wrapper, path, index);
+        } else if (descriptor.isCollection()) {
+            extendCollectionIfNecessary(wrapper, path, index);
+        } else if (descriptor.getType().equals(Object.class)) {
+            // Untyped (Object) property: treat it as a map entry.
+            if (isBlanked(wrapper, name, path.name(index))) {
+                path.collapseKeys(index);
+            }
+            path.mapIndex(index);
+            if (path.isLastNode(index)) {
+                // Mark the leaf with the BLANK sentinel so later passes can
+                // tell it was initialized here (see isBlanked).
+                wrapper.setPropertyValue(path.toString(), BLANK);
+            } else {
+                String next = path.prefix(index + 1);
+                if (wrapper.getPropertyValue(next) == null) {
+                    wrapper.setPropertyValue(next, new LinkedHashMap<String, Object>());
+                }
+            }
+        }
+        return initializePath(wrapper, path, index);
+    }
+
+    /**
+     * Returns {@code true} when the descriptor is a map whose values are
+     * character sequences (with {@link Properties} treated as
+     * {@code Map<String,String>} despite its declared type).
+     */
+    private boolean isMapValueStringType(TypeDescriptor descriptor) {
+        if (descriptor == null || descriptor.getMapValueTypeDescriptor() == null) {
+            return false;
+        }
+        if (Properties.class.isAssignableFrom(descriptor.getObjectType())) {
+            // Properties is declared as Map<Object,Object> but we know it's really
+            // Map<String,String>
+            return true;
+        }
+        Class<?> valueType = descriptor.getMapValueTypeDescriptor().getObjectType();
+        return (valueType != null && CharSequence.class.isAssignableFrom(valueType));
+    }
+
+    /**
+     * Returns {@code true} if the map held by {@code propertyName} stores the
+     * BLANK sentinel under {@code key} (i.e. the entry was initialized by
+     * {@code initializePath} but not yet given a real value).
+     */
+    @SuppressWarnings("rawtypes")
+    private boolean isBlanked(BeanWrapper wrapper, String propertyName, String key) {
+        Object value = (wrapper.isReadableProperty(propertyName) ? wrapper.getPropertyValue(propertyName) : null);
+        if (value instanceof Map) {
+            if (((Map) value).get(key) == BLANK) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Pre-populates the next collection element with a container (list for
+     * numeric indexes, map otherwise) when the element type is itself a map,
+     * collection, or untyped Object; scalar element types need no help.
+     */
+    private void extendCollectionIfNecessary(BeanWrapper wrapper, RelaxedDataBinder.BeanPath path, int index) {
+        String name = path.prefix(index);
+        TypeDescriptor elementDescriptor = wrapper.getPropertyTypeDescriptor(name).getElementTypeDescriptor();
+        if (!elementDescriptor.isMap() && !elementDescriptor.isCollection()
+            && !elementDescriptor.getType().equals(Object.class)) {
+            return;
+        }
+        Object extend = new LinkedHashMap<String, Object>();
+        if (!elementDescriptor.isMap() && path.isArrayIndex(index)) {
+            extend = new ArrayList<Object>();
+        }
+        wrapper.setPropertyValue(path.prefix(index + 1), extend);
+    }
+
+    /**
+     * Ensures the map entry addressed by the next path segment holds a
+     * suitable container (map, list, or the BLANK sentinel for an untyped
+     * leaf) so that nested binding can proceed.
+     */
+    private void extendMapIfNecessary(BeanWrapper wrapper, RelaxedDataBinder.BeanPath path, int index) {
+        String name = path.prefix(index);
+        TypeDescriptor parent = wrapper.getPropertyTypeDescriptor(name);
+        if (parent == null) {
+            return;
+        }
+        TypeDescriptor descriptor = parent.getMapValueTypeDescriptor();
+        if (descriptor == null) {
+            descriptor = TypeDescriptor.valueOf(Object.class);
+        }
+        if (!descriptor.isMap() && !descriptor.isCollection() && !descriptor.getType().equals(Object.class)) {
+            return;
+        }
+        String extensionName = path.prefix(index + 1);
+        if (wrapper.isReadableProperty(extensionName)) {
+            // Don't overwrite an existing value that already has the
+            // expected container type.
+            Object currentValue = wrapper.getPropertyValue(extensionName);
+            if ((descriptor.isCollection() && currentValue instanceof Collection)
+                || (!descriptor.isCollection() && currentValue instanceof Map)) {
+                return;
+            }
+        }
+        Object extend = new LinkedHashMap<String, Object>();
+        if (descriptor.isCollection()) {
+            extend = new ArrayList<Object>();
+        }
+        if (descriptor.getType().equals(Object.class) && path.isLastNode(index)) {
+            extend = BLANK;
+        }
+        wrapper.setPropertyValue(extensionName, extend);
+    }
+
+    /**
+     * Resolves {@code name} to the actual bean property name, trying direct
+     * resolution first, then nested resolution, and finally falling back to
+     * the name itself when nothing matches.
+     */
+    private String getActualPropertyName(BeanWrapper target, String prefix, String name) {
+        String propertyName = resolvePropertyName(target, prefix, name);
+        if (propertyName == null) {
+            propertyName = resolveNestedPropertyName(target, prefix, name);
+        }
+        return (propertyName != null ? propertyName : name);
+    }
+
+    /**
+     * Attempts to resolve {@code name} as a nested path by progressively
+     * consuming '_' / '-' / '.'-separated segments until a known property is
+     * found, then resolving the remainder against that property.
+     *
+     * @return the resolved nested name, or {@code null} when none matches
+     */
+    private String resolveNestedPropertyName(BeanWrapper target, String prefix, String name) {
+        StringBuilder candidate = new StringBuilder();
+        for (String field : name.split("[_\\-\\.]")) {
+            candidate.append(candidate.length() > 0 ? "." : "");
+            candidate.append(field);
+            String nested = resolvePropertyName(target, prefix, candidate.toString());
+            if (nested != null) {
+                Class<?> type = target.getPropertyType(nested);
+                if ((type != null) && Map.class.isAssignableFrom(type)) {
+                    // Special case for map property (gh-3836).
+                    return nested + "[" + name.substring(candidate.length() + 1) + "]";
+                }
+                // Resolve the rest of the name relative to the matched prefix.
+                String propertyName = resolvePropertyName(target,
+                    joinString(prefix, nested),
+                    name.substring(candidate.length() + 1));
+                if (propertyName != null) {
+                    return joinString(nested, propertyName);
+                }
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Finds a bean property matching {@code name} (or one of its configured
+     * aliases) under {@code prefix}, trying each relaxed name variant in turn.
+     *
+     * @return the matching candidate name, or {@code null} if none resolves
+     */
+    private String resolvePropertyName(BeanWrapper target, String prefix, String name) {
+        Iterable<String> names = getNameAndAliases(name);
+        for (String nameOrAlias : names) {
+            for (String candidate : new RelaxedNames(nameOrAlias)) {
+                try {
+                    if (target.getPropertyType(joinString(prefix, candidate)) != null) {
+                        return candidate;
+                    }
+                } catch (InvalidPropertyException ex) {
+                    // swallow and continue: an invalid candidate just means
+                    // this relaxed variant doesn't exist on the target.
+                }
+            }
+        }
+        return null;
+    }
+
+    private String joinString(String prefix, String name) {
+        return (StringUtils.hasLength(prefix) ? prefix + "." + name : name);
+    }
+
+    /**
+     * Returns {@code name} followed by any aliases registered for it; just the
+     * name itself when no aliases exist.
+     */
+    private Iterable<String> getNameAndAliases(String name) {
+        List<String> aliases = this.nameAliases.get(name);
+        if (aliases == null) {
+            return Collections.singleton(name);
+        }
+        List<String> nameAndAliases = new ArrayList<String>(aliases.size() + 1);
+        nameAndAliases.add(name);
+        nameAndAliases.addAll(aliases);
+        return nameAndAliases;
+    }
+
+    /**
+     * Wraps plain {@link Map} targets in a {@link MapHolder} so they can be
+     * bound through a bean property; other targets pass through unchanged.
+     */
+    private static Object wrapTarget(Object target) {
+        if (target instanceof Map) {
+            @SuppressWarnings("unchecked")
+            Map<String, Object> map = (Map<String, Object>) target;
+            target = new RelaxedDataBinder.MapHolder(map);
+        }
+        return target;
+    }
+
+    @Override
+    public void registerCustomEditor(Class<?> requiredType, PropertyEditor propertyEditor) {
+        if (propertyEditor == null || !EXCLUDED_EDITORS.contains(propertyEditor.getClass())) {
+            super.registerCustomEditor(requiredType, propertyEditor);
+        }
+    }
+
+    @Override
+    public void registerCustomEditor(Class<?> requiredType, String field, PropertyEditor propertyEditor) {
+        if (propertyEditor == null || !EXCLUDED_EDITORS.contains(propertyEditor.getClass())) {
+            super.registerCustomEditor(requiredType, field, propertyEditor);
+        }
+    }
+
+    /**
+     * Holder to allow Map targets to be bound.
+     */
+    static class MapHolder {
+
+        // The wrapped map, exposed as the "map" bean property so it can be
+        // targeted by the paths produced in addMapPrefix().
+        private Map<String, Object> map;
+
+        MapHolder(Map<String, Object> map){
+            this.map = map;
+        }
+
+        public void setMap(Map<String, Object> map) {
+            this.map = map;
+        }
+
+        public Map<String, Object> getMap() {
+            return this.map;
+        }
+
+    }
+
+    /**
+     * A path though properties of a bean.
+     */
+    private static class BeanPath {
+
+        private List<PathNode> nodes;
+
+        BeanPath(String path){
+            this.nodes = splitPath(path);
+        }
+
+        public List<String> prefixes() {
+            List<String> prefixes = new ArrayList<String>();
+            for (int index = 1; index < this.nodes.size(); index++) {
+                prefixes.add(prefix(index));
+            }
+            return prefixes;
+        }
+
+        public boolean isLastNode(int index) {
+            return index >= this.nodes.size() - 1;
+        }
+
+        private List<PathNode> splitPath(String path) {
+            List<PathNode> nodes = new ArrayList<PathNode>();
+            String current = extractIndexedPaths(path, nodes);
+            for (String name : StringUtils.delimitedListToStringArray(current, ".")) {
+                if (StringUtils.hasText(name)) {
+                    nodes.add(new RelaxedDataBinder.BeanPath.PropertyNode(name));
+                }
+            }
+            return nodes;
+        }
+
+        /**
+         * Consumes {@code [..]} references from {@code path}, adding an index
+         * node for each (numeric content becomes an array index, anything
+         * else a map index) plus property nodes for any text before the
+         * bracket; returns the unconsumed tail of the path.
+         */
+        private String extractIndexedPaths(String path, List<PathNode> nodes) {
+            int startRef = path.indexOf("[");
+            String current = path;
+            while (startRef >= 0) {
+                if (startRef > 0) {
+                    nodes.addAll(splitPath(current.substring(0, startRef)));
+                }
+                int endRef = current.indexOf("]", startRef);
+                if (endRef > 0) {
+                    String sub = current.substring(startRef + 1, endRef);
+                    if (sub.matches("[0-9]+")) {
+                        nodes.add(new RelaxedDataBinder.BeanPath.ArrayIndexNode(sub));
+                    } else {
+                        nodes.add(new RelaxedDataBinder.BeanPath.MapIndexNode(sub));
+                    }
+                }
+                current = current.substring(endRef + 1);
+                startRef = current.indexOf("[");
+            }
+            return current;
+        }
+
+        /**
+         * Replaces all nodes from {@code index} onwards with a single property
+         * node whose name is the period-joined remainder — used when the tail
+         * of the path is really one map key rather than nested properties.
+         */
+        public void collapseKeys(int index) {
+            List<PathNode> revised = new ArrayList<PathNode>();
+            for (int i = 0; i < index; i++) {
+                revised.add(this.nodes.get(i));
+            }
+            StringBuilder builder = new StringBuilder();
+            for (int i = index; i < this.nodes.size(); i++) {
+                if (i > index) {
+                    builder.append(".");
+                }
+                builder.append(this.nodes.get(i).name);
+            }
+            revised.add(new RelaxedDataBinder.BeanPath.PropertyNode(builder.toString()));
+            this.nodes = revised;
+        }
+
+        public void mapIndex(int index) {
+            RelaxedDataBinder.BeanPath.PathNode node = this.nodes.get(index);
+            if (node instanceof RelaxedDataBinder.BeanPath.PropertyNode) {
+                node = ((RelaxedDataBinder.BeanPath.PropertyNode) node).mapIndex();
+            }
+            this.nodes.set(index, node);
+        }
+
+        public String prefix(int index) {
+            return range(0, index);
+        }
+
+        public void rename(int index, String name) {
+            this.nodes.get(index).name = name;
+        }
+
+        public String name(int index) {
+            if (index < this.nodes.size()) {
+                return this.nodes.get(index).name;
+            }
+            return null;
+        }
+
+        private String range(int start, int end) {
+            StringBuilder builder = new StringBuilder();
+            for (int i = start; i < end; i++) {
+                RelaxedDataBinder.BeanPath.PathNode node = this.nodes.get(i);
+                builder.append(node);
+            }
+            if (builder.toString().startsWith(("."))) {
+                builder.replace(0, 1, "");
+            }
+            return builder.toString();
+        }
+
+        public boolean isArrayIndex(int index) {
+            return this.nodes.get(index) instanceof RelaxedDataBinder.BeanPath.ArrayIndexNode;
+        }
+
+        public boolean isProperty(int index) {
+            return this.nodes.get(index) instanceof RelaxedDataBinder.BeanPath.PropertyNode;
+        }
+
+        @Override
+        public String toString() {
+            return prefix(this.nodes.size());
+        }
+
+        private static class PathNode {
+
+            protected String name;
+
+            PathNode(String name){
+                this.name = name;
+            }
+
+        }
+
+        private static class ArrayIndexNode extends RelaxedDataBinder.BeanPath.PathNode {
+
+            ArrayIndexNode(String name){
+                super(name);
+            }
+
+            @Override
+            public String toString() {
+                return "[" + this.name + "]";
+            }
+
+        }
+
+        private static class MapIndexNode extends RelaxedDataBinder.BeanPath.PathNode {
+
+            MapIndexNode(String name){
+                super(name);
+            }
+
+            @Override
+            public String toString() {
+                return "[" + this.name + "]";
+            }
+
+        }
+
+        private static class PropertyNode extends RelaxedDataBinder.BeanPath.PathNode {
+
+            PropertyNode(String name){
+                super(name);
+            }
+
+            public RelaxedDataBinder.BeanPath.MapIndexNode mapIndex() {
+                return new RelaxedDataBinder.BeanPath.MapIndexNode(this.name);
+            }
+
+            @Override
+            public String toString() {
+                return "." + this.name;
+            }
+
+        }
+
+    }
+
+    /**
+     * Extended version of {@link BeanPropertyBindingResult} to support relaxed
+     * binding.
+     */
+    private static class RelaxedBeanPropertyBindingResult extends BeanPropertyBindingResult {
+
+        private RelaxedConversionService conversionService;
+
+        RelaxedBeanPropertyBindingResult(Object target, String objectName, boolean autoGrowNestedPaths,
+                                         int autoGrowCollectionLimit, ConversionService conversionService){
+            super(target, objectName, autoGrowNestedPaths, autoGrowCollectionLimit);
+            this.conversionService = new RelaxedConversionService(conversionService);
+        }
+
+        @Override
+        protected BeanWrapper createBeanWrapper() {
+            BeanWrapper beanWrapper = new RelaxedDataBinder.RelaxedBeanWrapper(getTarget());
+            beanWrapper.setConversionService(this.conversionService);
+            beanWrapper.registerCustomEditor(InetAddress.class, new InetAddressEditor());
+            return beanWrapper;
+        }
+
+    }
+
+    /**
+     * Extended version of {@link BeanWrapperImpl} to support relaxed binding.
+     */
+    private static class RelaxedBeanWrapper extends BeanWrapperImpl {
+
+        private static final Set<String> BENIGN_PROPERTY_SOURCE_NAMES;
+
+        static {
+            Set<String> names = new HashSet<String>();
+            names.add(StandardEnvironment.SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME);
+            names.add(StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME);
+            BENIGN_PROPERTY_SOURCE_NAMES = Collections.unmodifiableSet(names);
+        }
+
+        RelaxedBeanWrapper(Object target){
+            super(target);
+        }
+
+        /**
+         * Sets the property value, downgrading {@link NotWritablePropertyException}
+         * to a debug log when the value came from a benign source (system
+         * properties / environment), and otherwise enriching the exception
+         * with the value's origin when one is known.
+         */
+        @Override
+        public void setPropertyValue(PropertyValue pv) throws BeansException {
+            try {
+                super.setPropertyValue(pv);
+            } catch (NotWritablePropertyException ex) {
+                PropertyOrigin origin = OriginCapablePropertyValue.getOrigin(pv);
+                if (isBenign(origin)) {
+                    logger.debug("Ignoring benign property binding failure", ex);
+                    return;
+                }
+                if (origin == null) {
+                    throw ex;
+                }
+                throw new RelaxedBindingNotWritablePropertyException(ex, origin);
+            }
+        }
+
+        private boolean isBenign(PropertyOrigin origin) {
+            String name = (origin != null ? origin.getSource().getName() : null);
+            return BENIGN_PROPERTY_SOURCE_NAMES.contains(name);
+        }
+
+    }
+
+    /**
+     * {@link NotWritablePropertyException} variant that also reports where the
+     * offending value came from (its {@link PropertyOrigin}).
+     */
+    public static class RelaxedBindingNotWritablePropertyException extends NotWritablePropertyException {
+
+        // Pre-built message including the origin's name and source.
+        private final String         message;
+
+        private final PropertyOrigin propertyOrigin;
+
+        RelaxedBindingNotWritablePropertyException(NotWritablePropertyException ex, PropertyOrigin propertyOrigin){
+            super(ex.getBeanClass(), ex.getPropertyName());
+            this.propertyOrigin = propertyOrigin;
+            this.message = "Failed to bind '" + propertyOrigin.getName() + "' from '"
+                           + propertyOrigin.getSource().getName() + "' to '" + ex.getPropertyName() + "' property on '"
+                           + ex.getBeanClass().getName() + "'";
+        }
+
+        @Override
+        public String getMessage() {
+            return this.message;
+        }
+
+        public PropertyOrigin getPropertyOrigin() {
+            return this.propertyOrigin;
+        }
+
+    }
+}

+ 241 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedNames.java

@@ -0,0 +1,241 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.Locale;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.springframework.util.StringUtils;
+
+/**
+ * Generates relaxed name variations from a given source.
+ *
+ * @author Phillip Webb
+ * @author Dave Syer
+ * @see RelaxedDataBinder
+ */
+public final class RelaxedNames implements Iterable<String> {
+
+    private static final Pattern CAMEL_CASE_PATTERN              = Pattern.compile("([^A-Z-])([A-Z])");
+
+    private static final Pattern SEPARATED_TO_CAMEL_CASE_PATTERN = Pattern.compile("[_\\-.]");
+
+    private final String         name;
+
+    private final Set<String>    values                          = new LinkedHashSet<String>();
+
+    /**
+     * Create a new {@link RelaxedNames} instance.
+     *
+     * @param name the source name. For the maximum number of variations specify the
+     *     name using dashed notation (e.g. {@literal my-property-name}
+     */
+    public RelaxedNames(String name){
+        this.name = (name != null ? name : "");
+        initialize(RelaxedNames.this.name, this.values);
+    }
+
+    @Override
+    public Iterator<String> iterator() {
+        return this.values.iterator();
+    }
+
+    /**
+     * Recursively applies every manipulation/variation combination to
+     * {@code name}, accumulating variants (in insertion order) until no new
+     * variant is produced.
+     */
+    private void initialize(String name, Set<String> values) {
+        if (values.contains(name)) {
+            // Already expanded this variant; stops the recursion.
+            return;
+        }
+        for (RelaxedNames.Variation variation : RelaxedNames.Variation.values()) {
+            for (RelaxedNames.Manipulation manipulation : RelaxedNames.Manipulation.values()) {
+                String result = name;
+                result = manipulation.apply(result);
+                result = variation.apply(result);
+                values.add(result);
+                initialize(result, values);
+            }
+        }
+    }
+
+    /**
+     * Name variations.
+     */
+    enum Variation {
+
+                    NONE {
+
+                        @Override
+                        public String apply(String value) {
+                            return value;
+                        }
+
+                    },
+
+                    LOWERCASE {
+
+                        @Override
+                        public String apply(String value) {
+                            return (value.isEmpty() ? value : value.toLowerCase(Locale.ENGLISH));
+                        }
+
+                    },
+
+                    UPPERCASE {
+
+                        @Override
+                        public String apply(String value) {
+                            return (value.isEmpty() ? value : value.toUpperCase(Locale.ENGLISH));
+                        }
+
+                    };
+
+        public abstract String apply(String value);
+
+    }
+
+    /**
+     * Name manipulations.
+     */
+    enum Manipulation {
+
+                       NONE {
+
+                           @Override
+                           public String apply(String value) {
+                               return value;
+                           }
+
+                       },
+
+                       HYPHEN_TO_UNDERSCORE {
+
+                           @Override
+                           public String apply(String value) {
+                               return (value.indexOf('-') != -1 ? value.replace('-', '_') : value);
+                           }
+
+                       },
+
+                       UNDERSCORE_TO_PERIOD {
+
+                           @Override
+                           public String apply(String value) {
+                               return (value.indexOf('_') != -1 ? value.replace('_', '.') : value);
+                           }
+
+                       },
+
+                       PERIOD_TO_UNDERSCORE {
+
+                           @Override
+                           public String apply(String value) {
+                               return (value.indexOf('.') != -1 ? value.replace('.', '_') : value);
+                           }
+
+                       },
+
+                       CAMELCASE_TO_UNDERSCORE {
+
+                           @Override
+                           public String apply(String value) {
+                               if (value.isEmpty()) {
+                                   return value;
+                               }
+                               Matcher matcher = CAMEL_CASE_PATTERN.matcher(value);
+                               if (!matcher.find()) {
+                                   return value;
+                               }
+                               matcher = matcher.reset();
+                               StringBuffer result = new StringBuffer();
+                               while (matcher.find()) {
+                                   matcher.appendReplacement(result,
+                                       matcher.group(1) + '_' + StringUtils.uncapitalize(matcher.group(2)));
+                               }
+                               matcher.appendTail(result);
+                               return result.toString();
+                           }
+
+                       },
+
+                       CAMELCASE_TO_HYPHEN {
+
+                           @Override
+                           public String apply(String value) {
+                               if (value.isEmpty()) {
+                                   return value;
+                               }
+                               Matcher matcher = CAMEL_CASE_PATTERN.matcher(value);
+                               if (!matcher.find()) {
+                                   return value;
+                               }
+                               matcher = matcher.reset();
+                               StringBuffer result = new StringBuffer();
+                               while (matcher.find()) {
+                                   matcher.appendReplacement(result,
+                                       matcher.group(1) + '-' + StringUtils.uncapitalize(matcher.group(2)));
+                               }
+                               matcher.appendTail(result);
+                               return result.toString();
+                           }
+
+                       },
+
+                       SEPARATED_TO_CAMELCASE {
+
+                           @Override
+                           public String apply(String value) {
+                               return separatedToCamelCase(value, false);
+                           }
+
+                       },
+
+                       CASE_INSENSITIVE_SEPARATED_TO_CAMELCASE {
+
+                           @Override
+                           public String apply(String value) {
+                               return separatedToCamelCase(value, true);
+                           }
+
+                       };
+
+        private static final char[] SUFFIXES = new char[] { '_', '-', '.' };
+
+        public abstract String apply(String value);
+
+        private static String separatedToCamelCase(String value, boolean caseInsensitive) {
+            if (value.isEmpty()) {
+                return value;
+            }
+            StringBuilder builder = new StringBuilder();
+            for (String field : SEPARATED_TO_CAMEL_CASE_PATTERN.split(value)) {
+                field = (caseInsensitive ? field.toLowerCase(Locale.ENGLISH) : field);
+                builder.append(builder.length() != 0 ? StringUtils.capitalize(field) : field);
+            }
+            char lastChar = value.charAt(value.length() - 1);
+            for (char suffix : SUFFIXES) {
+                if (lastChar == suffix) {
+                    builder.append(suffix);
+                    break;
+                }
+            }
+            return builder.toString();
+        }
+
+    }
+
+    /**
+     * Return a {@link RelaxedNames} for the given source camelCase source name.
+     *
+     * @param name the source name in camelCase
+     * @return the relaxed names
+     */
+    public static RelaxedNames forCamelCase(String name) {
+        StringBuilder result = new StringBuilder();
+        for (char c : name.toCharArray()) {
+            result.append(Character.isUpperCase(c) && result.length() > 0
+                          && result.charAt(result.length() - 1) != '-' ? "-" + Character.toLowerCase(c) : c);
+        }
+        return new RelaxedNames(result.toString());
+    }
+
+}

+ 17 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/StringToCharArrayConverter.java

@@ -0,0 +1,17 @@
+package com.alibaba.otter.canal.client.adapter.config.bind;
+
+import org.springframework.core.convert.converter.Converter;
+
/**
 * Converts a String to a Char Array.
 *
 * @author Phillip Webb
 */
class StringToCharArrayConverter implements Converter<String, char[]> {

    /**
     * Convert the given source text to a new {@code char[]}.
     *
     * @param source the string to convert (non-null per the {@link Converter}
     *     contract — TODO confirm callers never pass {@code null})
     * @return a fresh array containing the string's characters
     */
    @Override
    public char[] convert(String source) {
        return source.toCharArray();
    }

}

+ 203 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/AbstractResource.java

@@ -0,0 +1,203 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+import org.springframework.util.Assert;
+import org.springframework.util.ResourceUtils;
+
+/**
+ * Convenience base class for {@link Resource} implementations, pre-implementing
+ * typical behavior.
+ * <p>
+ * The "exists" method will check whether a File or InputStream can be opened;
+ * "isOpen" will always return false; "getURL" and "getFile" throw an exception;
+ * and "toString" will return the description.
+ *
+ * @author Juergen Hoeller
+ * @since 28.12.2003
+ */
+public abstract class AbstractResource implements Resource {
+
+    /**
+     * This implementation checks whether a File can be opened, falling back to
+     * whether an InputStream can be opened. This will cover both directories and
+     * content resources.
+     */
+    @Override
+    public boolean exists() {
+        // Try file existence: can we find the file in the file system?
+        try {
+            return getFile().exists();
+        } catch (IOException ex) {
+            // Fall back to stream existence: can we open the stream?
+            try {
+                InputStream is = getInputStream();
+                is.close();
+                return true;
+            } catch (Throwable isEx) {
+                return false;
+            }
+        }
+    }
+
+    /**
+     * This implementation always returns {@code true}.
+     */
+    @Override
+    public boolean isReadable() {
+        return true;
+    }
+
+    /**
+     * This implementation always returns {@code false}.
+     */
+    @Override
+    public boolean isOpen() {
+        return false;
+    }
+
+    /**
+     * This implementation throws a FileNotFoundException, assuming that the
+     * resource cannot be resolved to a URL.
+     */
+    @Override
+    public URL getURL() throws IOException {
+        throw new FileNotFoundException(getDescription() + " cannot be resolved to URL");
+    }
+
+    /**
+     * This implementation builds a URI based on the URL returned by
+     * {@link #getURL()}.
+     */
+    @Override
+    public URI getURI() throws IOException {
+        URL url = getURL();
+        try {
+            return ResourceUtils.toURI(url);
+        } catch (URISyntaxException ex) {
+            throw new RuntimeException("Invalid URI [" + url + "]", ex);
+        }
+    }
+
+    /**
+     * This implementation throws a FileNotFoundException, assuming that the
+     * resource cannot be resolved to an absolute file path.
+     */
+    @Override
+    public File getFile() throws IOException {
+        throw new FileNotFoundException(getDescription() + " cannot be resolved to absolute file path");
+    }
+
+    /**
+     * This implementation reads the entire InputStream to calculate the content
+     * length. Subclasses will almost always be able to provide a more optimal
+     * version of this, e.g. checking a File length.
+     *
+     * @see #getInputStream()
+     */
+    @Override
+    public long contentLength() throws IOException {
+        InputStream is = getInputStream();
+        Assert.state(is != null, "Resource InputStream must not be null");
+        try {
+            long size = 0;
+            byte[] buf = new byte[255];
+            int read;
+            while ((read = is.read(buf)) != -1) {
+                size += read;
+            }
+            return size;
+        } finally {
+            try {
+                is.close();
+            } catch (IOException ex) {
+            }
+        }
+    }
+
+    /**
+     * This implementation checks the timestamp of the underlying File, if
+     * available.
+     *
+     * @see #getFileForLastModifiedCheck()
+     */
+    @Override
+    public long lastModified() throws IOException {
+        long lastModified = getFileForLastModifiedCheck().lastModified();
+        if (lastModified == 0L) {
+            throw new FileNotFoundException(
+                getDescription() + " cannot be resolved in the file system for resolving its last-modified timestamp");
+        }
+        return lastModified;
+    }
+
+    /**
+     * Determine the File to use for timestamp checking.
+     * <p>
+     * The default implementation delegates to {@link #getFile()}.
+     *
+     * @return the File to use for timestamp checking (never {@code null})
+     * @throws FileNotFoundException if the resource cannot be resolved as an
+     *     absolute file path, i.e. is not available in a file system
+     * @throws IOException in case of general resolution/reading failures
+     */
+    protected File getFileForLastModifiedCheck() throws IOException {
+        return getFile();
+    }
+
+    /**
+     * This implementation throws a FileNotFoundException, assuming that relative
+     * resources cannot be created for this resource.
+     */
+    @Override
+    public org.springframework.core.io.Resource createRelative(String relativePath) throws IOException {
+        throw new FileNotFoundException("Cannot create a relative resource for " + getDescription());
+    }
+
+    /**
+     * This implementation always returns {@code null}, assuming that this resource
+     * type does not have a filename.
+     */
+    @Override
+    public String getFilename() {
+        return null;
+    }
+
+    /**
+     * This implementation returns the description of this resource.
+     *
+     * @see #getDescription()
+     */
+    @Override
+    public String toString() {
+        return getDescription();
+    }
+
+    /**
+     * This implementation compares description strings.
+     *
+     * @see #getDescription()
+     */
+    @Override
+    public boolean equals(Object obj) {
+        return (obj == this
+                || (obj instanceof org.springframework.core.io.Resource
+                    && ((org.springframework.core.io.Resource) obj).getDescription().equals(getDescription())));
+    }
+
+    /**
+     * This implementation returns the description's hash code.
+     *
+     * @see #getDescription()
+     */
+    @Override
+    public int hashCode() {
+        return getDescription().hashCode();
+    }
+}

+ 117 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/ByteArrayResource.java

@@ -0,0 +1,117 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+
+import org.springframework.core.io.InputStreamResource;
+import org.springframework.core.io.Resource;
+import org.springframework.util.Assert;
+
+/**
+ * {@link Resource} implementation for a given byte array.
+ * <p>
+ * Creates a {@link ByteArrayInputStream} for the given byte array.
+ * <p>
+ * Useful for loading content from any given byte array, without having to
+ * resort to a single-use {@link InputStreamResource}. Particularly useful for
+ * creating mail attachments from local content, where JavaMail needs to be able
+ * to read the stream multiple times.
+ *
+ * @author Juergen Hoeller
+ * @author Sam Brannen
+ * @see ByteArrayInputStream
+ * @see InputStreamResource
+ * @since 1.2.3
+ */
+public class ByteArrayResource extends AbstractResource {
+
+    private final byte[] byteArray;
+
+    private final String description;
+
+    /**
+     * Create a new {@code ByteArrayResource}.
+     *
+     * @param byteArray the byte array to wrap
+     */
+    public ByteArrayResource(byte[] byteArray){
+        this(byteArray, "resource loaded from byte array");
+    }
+
+    /**
+     * Create a new {@code ByteArrayResource} with a description.
+     *
+     * @param byteArray the byte array to wrap
+     * @param description where the byte array comes from
+     */
+    public ByteArrayResource(byte[] byteArray, String description){
+        Assert.notNull(byteArray, "Byte array must not be null");
+        this.byteArray = byteArray;
+        this.description = (description != null ? description : "");
+    }
+
+    /**
+     * Return the underlying byte array.
+     */
+    public final byte[] getByteArray() {
+        return this.byteArray;
+    }
+
+    /**
+     * This implementation always returns {@code true}.
+     */
+    @Override
+    public boolean exists() {
+        return true;
+    }
+
+    /**
+     * This implementation returns the length of the underlying byte array.
+     */
+    @Override
+    public long contentLength() {
+        return this.byteArray.length;
+    }
+
+    /**
+     * This implementation returns a ByteArrayInputStream for the underlying byte
+     * array.
+     *
+     * @see ByteArrayInputStream
+     */
+    @Override
+    public InputStream getInputStream() throws IOException {
+        return new ByteArrayInputStream(this.byteArray);
+    }
+
+    /**
+     * This implementation returns a description that includes the passed-in
+     * {@code description}, if any.
+     */
+    @Override
+    public String getDescription() {
+        return "Byte array resource [" + this.description + "]";
+    }
+
+    /**
+     * This implementation compares the underlying byte array.
+     *
+     * @see Arrays#equals(byte[], byte[])
+     */
+    @Override
+    public boolean equals(Object obj) {
+        return (obj == this || (obj instanceof org.springframework.core.io.ByteArrayResource
+                                && Arrays.equals(((ByteArrayResource) obj).byteArray, this.byteArray)));
+    }
+
+    /**
+     * This implementation returns the hash code based on the underlying byte array.
+     */
+    @Override
+    public int hashCode() {
+        return (byte[].class.hashCode() * 29 * this.byteArray.length);
+    }
+
+}

+ 107 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/CompositePropertySource.java

@@ -0,0 +1,107 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import java.util.*;
+
+import org.springframework.util.StringUtils;
+
+/**
+ * Composite {@link PropertySource} implementation that iterates over a set of
+ * {@link PropertySource} instances. Necessary in cases where multiple property
+ * sources share the same name, e.g. when multiple values are supplied to
+ * {@code @PropertySource}.
+ * <p>
+ * As of Spring 4.1.2, this class extends {@link EnumerablePropertySource}
+ * instead of plain {@link PropertySource}, exposing {@link #getPropertyNames()}
+ * based on the accumulated property names from all contained sources (as far as
+ * possible).
+ *
+ * @author Chris Beams
+ * @author Juergen Hoeller
+ * @author Phillip Webb
+ * @since 3.1.1
+ */
+public class CompositePropertySource extends EnumerablePropertySource<Object> {
+
+    private final Set<PropertySource<?>> propertySources = new LinkedHashSet<PropertySource<?>>();
+
+    /**
+     * Create a new {@code CompositePropertySource}.
+     *
+     * @param name the name of the property source
+     */
+    public CompositePropertySource(String name){
+        super(name);
+    }
+
+    @Override
+    public Object getProperty(String name) {
+        for (PropertySource<?> propertySource : this.propertySources) {
+            Object candidate = propertySource.getProperty(name);
+            if (candidate != null) {
+                return candidate;
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public boolean containsProperty(String name) {
+        for (PropertySource<?> propertySource : this.propertySources) {
+            if (propertySource.containsProperty(name)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public String[] getPropertyNames() {
+        Set<String> names = new LinkedHashSet<String>();
+        for (PropertySource<?> propertySource : this.propertySources) {
+            if (!(propertySource instanceof EnumerablePropertySource)) {
+                throw new IllegalStateException(
+                    "Failed to enumerate property names due to non-enumerable property source: " + propertySource);
+            }
+            names.addAll(Arrays.asList(((EnumerablePropertySource<?>) propertySource).getPropertyNames()));
+        }
+        return StringUtils.toStringArray(names);
+    }
+
+    /**
+     * Add the given {@link PropertySource} to the end of the chain.
+     *
+     * @param propertySource the PropertySource to add
+     */
+    public void addPropertySource(PropertySource<?> propertySource) {
+        this.propertySources.add(propertySource);
+    }
+
+    /**
+     * Add the given {@link PropertySource} to the start of the chain.
+     *
+     * @param propertySource the PropertySource to add
+     * @since 4.1
+     */
+    public void addFirstPropertySource(PropertySource<?> propertySource) {
+        List<PropertySource<?>> existing = new ArrayList<PropertySource<?>>(this.propertySources);
+        this.propertySources.clear();
+        this.propertySources.add(propertySource);
+        this.propertySources.addAll(existing);
+    }
+
+    /**
+     * Return all property sources that this composite source holds.
+     *
+     * @since 4.1.1
+     */
+    public Collection<PropertySource<?>> getPropertySources() {
+        return this.propertySources;
+    }
+
+    @Override
+    public String toString() {
+        return String
+            .format("%s [name='%s', propertySources=%s]", getClass().getSimpleName(), this.name, this.propertySources);
+    }
+
+}

+ 58 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/EnumerablePropertySource.java

@@ -0,0 +1,58 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import org.springframework.util.ObjectUtils;
+
/**
 * A {@link PropertySource} implementation capable of interrogating its
 * underlying source object to enumerate all possible property name/value pairs.
 * Exposes the {@link #getPropertyNames()} method to allow callers to introspect
 * available properties without having to access the underlying source object.
 * This also facilitates a more efficient implementation of
 * {@link #containsProperty(String)}, in that it can call
 * {@link #getPropertyNames()} and iterate through the returned array rather
 * than attempting a call to {@link #getProperty(String)} which may be more
 * expensive. Implementations may consider caching the result of
 * {@link #getPropertyNames()} to fully exploit this performance opportunity.
 * <p>
 * Most framework-provided {@code PropertySource} implementations are
 * enumerable; a counter-example would be {@code JndiPropertySource} where, due
 * to the nature of JNDI it is not possible to determine all possible property
 * names at any given time; rather it is only possible to try to access a
 * property (via {@link #getProperty(String)}) in order to evaluate whether it
 * is present or not.
 *
 * @author Chris Beams
 * @author Juergen Hoeller
 * @since 3.1
 */
public abstract class EnumerablePropertySource<T> extends PropertySource<T> {

    /**
     * Create a new {@code EnumerablePropertySource} with the given name and
     * source object.
     */
    public EnumerablePropertySource(String name, T source){
        super(name, source);
    }

    /**
     * Create a new {@code EnumerablePropertySource} with the given name,
     * delegating name-only construction to {@link PropertySource}.
     */
    protected EnumerablePropertySource(String name){
        super(name);
    }

    /**
     * Return whether this {@code PropertySource} contains a property with the given
     * name.
     * <p>
     * This implementation checks for the presence of the given name within the
     * {@link #getPropertyNames()} array.
     *
     * @param name the name of the property to find
     */
    @Override
    public boolean containsProperty(String name) {
        return ObjectUtils.containsElement(getPropertyNames(), name);
    }

    /**
     * Return the names of all properties contained by the {@linkplain #getSource()
     * source} object (never {@code null}).
     */
    public abstract String[] getPropertyNames();

}

+ 38 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/MapPropertySource.java

@@ -0,0 +1,38 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import java.util.Map;
+
+import org.springframework.core.env.PropertiesPropertySource;
+import org.springframework.core.env.PropertySource;
+import org.springframework.util.StringUtils;
+
/**
 * {@link PropertySource} that reads keys and values from a {@code Map} object.
 *
 * @author Chris Beams
 * @author Juergen Hoeller
 * @since 3.1
 * @see PropertiesPropertySource
 */
public class MapPropertySource extends EnumerablePropertySource<Map<String, Object>> {

    /**
     * Create a new {@code MapPropertySource} with the given name, backed by the
     * given map (held by reference, not copied).
     */
    public MapPropertySource(String name, Map<String, Object> source){
        super(name, source);
    }

    /** Direct lookup in the backing map; {@code null} if the key is absent. */
    @Override
    public Object getProperty(String name) {
        return this.source.get(name);
    }

    /** Key-presence check against the backing map. */
    @Override
    public boolean containsProperty(String name) {
        return this.source.containsKey(name);
    }

    /** The backing map's current key set, converted to a {@code String[]}. */
    @Override
    public String[] getPropertyNames() {
        return StringUtils.toStringArray(this.source.keySet());
    }

}

+ 221 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/MutablePropertySources.java

@@ -0,0 +1,221 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.CopyOnWriteArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.springframework.core.env.PropertyResolver;
+import org.springframework.core.env.PropertySourcesPropertyResolver;
+
+/**
+ * Default implementation of the {@link PropertySources} interface. Allows
+ * manipulation of contained property sources and provides a constructor for
+ * copying an existing {@code PropertySources} instance.
+ * <p>
+ * Where <em>precedence</em> is mentioned in methods such as {@link #addFirst}
+ * and {@link #addLast}, this is with regard to the order in which property
+ * sources will be searched when resolving a given property with a
+ * {@link PropertyResolver}.
+ *
+ * @author Chris Beams
+ * @author Juergen Hoeller
+ * @see PropertySourcesPropertyResolver
+ * @since 3.1
+ */
+public class MutablePropertySources implements PropertySources {
+
+    private final Log                     logger;
+
+    private final List<PropertySource<?>> propertySourceList = new CopyOnWriteArrayList<PropertySource<?>>();
+
+    /**
+     * Create a new {@link MutablePropertySources}
+     * object.
+     */
+    public MutablePropertySources(){
+        this.logger = LogFactory.getLog(getClass());
+    }
+
+    /**
+     * Create a new {@code MutablePropertySources} from the given propertySources
+     * object, preserving the original order of contained {@code PropertySource}
+     * objects.
+     */
+    public MutablePropertySources(PropertySources propertySources){
+        this();
+        for (PropertySource<?> propertySource : propertySources) {
+            addLast(propertySource);
+        }
+    }
+
+    /**
+     * Create a new {@link MutablePropertySources}
+     * object and inherit the given logger, usually from an enclosing
+     * {@link Environment}.
+     */
+    MutablePropertySources(Log logger){
+        this.logger = logger;
+    }
+
+    @Override
+    public boolean contains(String name) {
+        return this.propertySourceList.contains(PropertySource.named(name));
+    }
+
+    @Override
+    public PropertySource<?> get(String name) {
+        int index = this.propertySourceList.indexOf(PropertySource.named(name));
+        return (index != -1 ? this.propertySourceList.get(index) : null);
+    }
+
+    @Override
+    public Iterator<PropertySource<?>> iterator() {
+        return this.propertySourceList.iterator();
+    }
+
+    /**
+     * Add the given property source object with highest precedence.
+     */
+    public void addFirst(PropertySource<?> propertySource) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("Adding PropertySource '" + propertySource.getName() + "' with highest search precedence");
+        }
+        removeIfPresent(propertySource);
+        this.propertySourceList.add(0, propertySource);
+    }
+
+    /**
+     * Add the given property source object with lowest precedence.
+     */
+    public void addLast(PropertySource<?> propertySource) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("Adding PropertySource '" + propertySource.getName() + "' with lowest search precedence");
+        }
+        removeIfPresent(propertySource);
+        this.propertySourceList.add(propertySource);
+    }
+
+    /**
+     * Add the given property source object with precedence immediately higher than
+     * the named relative property source.
+     */
+    public void addBefore(String relativePropertySourceName, PropertySource<?> propertySource) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("Adding PropertySource '" + propertySource.getName()
+                         + "' with search precedence immediately higher than '" + relativePropertySourceName + "'");
+        }
+        assertLegalRelativeAddition(relativePropertySourceName, propertySource);
+        removeIfPresent(propertySource);
+        int index = assertPresentAndGetIndex(relativePropertySourceName);
+        addAtIndex(index, propertySource);
+    }
+
+    /**
+     * Add the given property source object with precedence immediately lower than
+     * the named relative property source.
+     */
+    public void addAfter(String relativePropertySourceName, PropertySource<?> propertySource) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("Adding PropertySource '" + propertySource.getName()
+                         + "' with search precedence immediately lower than '" + relativePropertySourceName + "'");
+        }
+        assertLegalRelativeAddition(relativePropertySourceName, propertySource);
+        removeIfPresent(propertySource);
+        int index = assertPresentAndGetIndex(relativePropertySourceName);
+        addAtIndex(index + 1, propertySource);
+    }
+
+    /**
+     * Return the precedence of the given property source, {@code -1} if not found.
+     */
+    public int precedenceOf(PropertySource<?> propertySource) {
+        return this.propertySourceList.indexOf(propertySource);
+    }
+
+    /**
+     * Remove and return the property source with the given name, {@code null} if
+     * not found.
+     *
+     * @param name the name of the property source to find and remove
+     */
+    public PropertySource<?> remove(String name) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("Removing PropertySource '" + name + "'");
+        }
+        int index = this.propertySourceList.indexOf(PropertySource.named(name));
+        return (index != -1 ? this.propertySourceList.remove(index) : null);
+    }
+
+    /**
+     * Replace the property source with the given name with the given property
+     * source object.
+     *
+     * @param name the name of the property source to find and replace
+     * @param propertySource the replacement property source
+     * @throws IllegalArgumentException if no property source with the given name is
+     *     present
+     * @see #contains
+     */
+    public void replace(String name, PropertySource<?> propertySource) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("Replacing PropertySource '" + name + "' with '" + propertySource.getName() + "'");
+        }
+        int index = assertPresentAndGetIndex(name);
+        this.propertySourceList.set(index, propertySource);
+    }
+
+    /**
+     * Return the number of {@link PropertySource} objects contained.
+     */
+    public int size() {
+        return this.propertySourceList.size();
+    }
+
+    @Override
+    public String toString() {
+        return this.propertySourceList.toString();
+    }
+
+    /**
+     * Ensure that the given property source is not being added relative to itself.
+     */
+    protected void assertLegalRelativeAddition(String relativePropertySourceName, PropertySource<?> propertySource) {
+        String newPropertySourceName = propertySource.getName();
+        if (relativePropertySourceName.equals(newPropertySourceName)) {
+            throw new IllegalArgumentException(
+                "PropertySource named '" + newPropertySourceName + "' cannot be added relative to itself");
+        }
+    }
+
+    /**
+     * Remove the given property source if it is present.
+     */
+    protected void removeIfPresent(PropertySource<?> propertySource) {
+        this.propertySourceList.remove(propertySource);
+    }
+
+    /**
+     * Add the given property source at a particular index in the list.
+     */
+    private void addAtIndex(int index, PropertySource<?> propertySource) {
+        removeIfPresent(propertySource);
+        this.propertySourceList.add(index, propertySource);
+    }
+
+    /**
+     * Assert that the named property source is present and return its index.
+     *
+     * @param name {@linkplain PropertySource#getName() name of the property source}
+     *     to find
+     * @throws IllegalArgumentException if the named property source is not present
+     */
+    private int assertPresentAndGetIndex(String name) {
+        int index = this.propertySourceList.indexOf(PropertySource.named(name));
+        if (index == -1) {
+            throw new IllegalArgumentException("PropertySource named '" + name + "' does not exist");
+        }
+        return index;
+    }
+}

+ 34 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertiesPropertySource.java

@@ -0,0 +1,34 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import java.util.Map;
+import java.util.Properties;
+
+import org.springframework.core.env.PropertySource;
+
/**
 * {@link PropertySource} implementation that extracts properties from a
 * {@link Properties} object.
 * <p>
 * Note that because a {@code Properties} object is technically an
 * {@code <Object, Object>} {@link java.util.Hashtable Hashtable}, one may
 * contain non-{@code String} keys or values. This implementation, however is
 * restricted to accessing only {@code String}-based keys and values, in the
 * same fashion as {@link Properties#getProperty} and
 * {@link Properties#setProperty}.
 *
 * @author Chris Beams
 * @author Juergen Hoeller
 * @since 3.1
 */
public class PropertiesPropertySource extends MapPropertySource {

    /**
     * Create a new {@code PropertiesPropertySource} backed by the given
     * {@code Properties} object.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public PropertiesPropertySource(String name, Properties source){
        // Properties is a Hashtable<Object, Object>; the raw cast narrows it to
        // the Map<String, Object> view that MapPropertySource expects.
        super(name, (Map) source);
    }

    /**
     * Subclass hook: create directly from a pre-built {@code Map} view.
     */
    protected PropertiesPropertySource(String name, Map<String, Object> source){
        super(name, source);
    }

}

+ 239 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySource.java

@@ -0,0 +1,239 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.springframework.util.Assert;
+import org.springframework.util.ObjectUtils;
+
/**
 * Abstract base class representing a source of name/value property pairs. The
 * underlying {@linkplain #getSource() source object} may be of any type
 * {@code T} that encapsulates properties. Examples include
 * {@link java.util.Properties} objects, {@link java.util.Map} objects,
 * {@code ServletContext} and {@code ServletConfig} objects (for access to init
 * parameters). Explore the {@code PropertySource} type hierarchy to see
 * provided implementations.
 * <p>
 * {@code PropertySource} objects are not typically used in isolation, but
 * rather through a {@link PropertySources} object, which aggregates property
 * sources and in conjunction with a {@code PropertyResolver} implementation
 * that can perform precedence-based searches across the set of
 * {@code PropertySources}.
 * <p>
 * {@code PropertySource} identity is determined not based on the content of
 * encapsulated properties, but rather based on the {@link #getName() name} of
 * the {@code PropertySource} alone. This is useful for manipulating
 * {@code PropertySource} objects when in collection contexts. See operations in
 * {@link MutablePropertySources} as well as the {@link #named(String)} and
 * {@link #toString()} methods for details.
 *
 * @author Chris Beams
 * @since 3.1
 * @see PropertySources
 * @see MutablePropertySources
 */
public abstract class PropertySource<T> {

    protected final Log    logger = LogFactory.getLog(getClass());

    protected final String name;

    protected final T      source;

    /**
     * Create a new {@code PropertySource} with the given name and source object.
     */
    public PropertySource(String name, T source){
        Assert.hasText(name, "Property source name must contain at least one character");
        Assert.notNull(source, "Property source must not be null");
        this.name = name;
        this.source = source;
    }

    /**
     * Create a new {@code PropertySource} with the given name and with a new
     * {@code Object} instance as the underlying source.
     * <p>
     * Often useful in testing scenarios when creating anonymous implementations
     * that never query an actual source but rather return hard-coded values.
     */
    @SuppressWarnings("unchecked")
    public PropertySource(String name){
        this(name, (T) new Object());
    }

    /**
     * Return the name of this {@code PropertySource}.
     */
    public String getName() {
        return this.name;
    }

    /**
     * Return the underlying source object for this {@code PropertySource}.
     */
    public T getSource() {
        return this.source;
    }

    /**
     * Return whether this {@code PropertySource} contains the given name.
     * <p>
     * This implementation simply checks for a {@code null} return value from
     * {@link #getProperty(String)}. Subclasses may wish to implement a more
     * efficient algorithm if possible.
     *
     * @param name the property name to find
     */
    public boolean containsProperty(String name) {
        return (getProperty(name) != null);
    }

    /**
     * Return the value associated with the given name, or {@code null} if not
     * found.
     *
     * @param name the property to find
     */
    public abstract Object getProperty(String name);

    /**
     * This {@code PropertySource} object is equal to the given object if:
     * <ul>
     * <li>they are the same instance
     * <li>the {@code name} properties for both objects are equal
     * </ul>
     * <p>
     * No properties other than {@code name} are evaluated.
     */
    @Override
    public boolean equals(Object obj) {
        // Identity is deliberately name-based only; the content of the
        // underlying source is ignored (see class-level javadoc).
        return (this == obj || (obj instanceof PropertySource
                                && ObjectUtils.nullSafeEquals(this.name, ((PropertySource<?>) obj).name)));
    }

    /**
     * Return a hash code derived from the {@code name} property of this
     * {@code PropertySource} object.
     */
    @Override
    public int hashCode() {
        return ObjectUtils.nullSafeHashCode(this.name);
    }

    /**
     * Produce concise output (type and name) if the current log level does not
     * include debug. If debug is enabled, produce verbose output including the hash
     * code of the PropertySource instance and every name/value property pair.
     * <p>
     * This variable verbosity is useful as a property source such as system
     * properties or environment variables may contain an arbitrary number of
     * property pairs, potentially leading to difficult to read exception and log
     * messages.
     *
     * @see Log#isDebugEnabled()
     */
    @Override
    public String toString() {
        if (logger.isDebugEnabled()) {
            return getClass().getSimpleName() + "@" + System.identityHashCode(this) + " {name='" + this.name
                   + "', properties=" + this.source + "}";
        } else {
            return getClass().getSimpleName() + " {name='" + this.name + "'}";
        }
    }

    /**
     * Return a {@code PropertySource} implementation intended for collection
     * comparison purposes only.
     * <p>
     * Primarily for internal use, but given a collection of {@code PropertySource}
     * objects, may be used as follows:
     *
     * <pre class="code">
     *
     * {
     *     &#64;code
     *     List<PropertySource<?>> sources = new ArrayList<PropertySource<?>>();
     *     sources.add(new MapPropertySource("sourceA", mapA));
     *     sources.add(new MapPropertySource("sourceB", mapB));
     *     assert sources.contains(PropertySource.named("sourceA"));
     *     assert sources.contains(PropertySource.named("sourceB"));
     *     assert !sources.contains(PropertySource.named("sourceC"));
     * }
     * </pre>
     *
     * The returned {@code PropertySource} will throw
     * {@code UnsupportedOperationException} if any methods other than
     * {@code equals(Object)}, {@code hashCode()}, and {@code toString()} are
     * called.
     *
     * @param name the name of the comparison {@code PropertySource} to be created
     *     and returned.
     */
    public static PropertySource<?> named(String name) {
        return new ComparisonPropertySource(name);
    }

    /**
     * {@code PropertySource} to be used as a placeholder in cases where an actual
     * property source cannot be eagerly initialized at application context creation
     * time. For example, a {@code ServletContext}-based property source must wait
     * until the {@code ServletContext} object is available to its enclosing
     * {@code ApplicationContext}. In such cases, a stub should be used to hold the
     * intended default position/order of the property source, then be replaced
     * during context refresh.
     *
     * @see org.springframework.web.context.support.StandardServletEnvironment
     * @see org.springframework.web.context.support.ServletContextPropertySource
     */
    public static class StubPropertySource extends PropertySource<Object> {

        public StubPropertySource(String name){
            super(name, new Object());
        }

        /**
         * Always returns {@code null}.
         */
        @Override
        public String getProperty(String name) {
            return null;
        }
    }

    /**
     * Name-only stub used by {@link PropertySource#named(String)}: every access
     * method throws, so it is safe solely for collection comparison.
     *
     * @see PropertySource#named(String)
     */
    static class ComparisonPropertySource extends StubPropertySource {

        private static final String USAGE_ERROR = "ComparisonPropertySource instances are for use with collection comparison only";

        public ComparisonPropertySource(String name){
            super(name);
        }

        @Override
        public Object getSource() {
            throw new UnsupportedOperationException(USAGE_ERROR);
        }

        @Override
        public boolean containsProperty(String name) {
            throw new UnsupportedOperationException(USAGE_ERROR);
        }

        @Override
        public String getProperty(String name) {
            throw new UnsupportedOperationException(USAGE_ERROR);
        }
    }

}

+ 35 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySourceLoader.java

@@ -0,0 +1,35 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import java.io.IOException;
+
+import org.springframework.core.io.support.SpringFactoriesLoader;
+
/**
 * Strategy interface located via {@link SpringFactoriesLoader} and used to load
 * a {@link PropertySource}.
 *
 * @author Dave Syer
 * @author Phillip Webb
 */
public interface PropertySourceLoader {

    /**
     * Returns the file extensions that the loader supports (excluding the '.').
     *
     * @return the file extensions
     */
    String[] getFileExtensions();

    /**
     * Load the resource into a property source.
     *
     * @param name the name of the property source
     * @param resource the resource to load
     * @param profile the name of the profile to load or {@code null}. The profile
     *     can be used to load multi-document files (such as YAML). Simple property
     *     formats should return {@code null} when asked to load a profile.
     * @return a property source or {@code null}
     * @throws IOException if the source cannot be loaded
     */
    PropertySource<?> load(String name, Resource resource, String profile) throws IOException;
}

+ 25 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySources.java

@@ -0,0 +1,25 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+/**
+ * Holder containing one or more {@link PropertySource} objects.
+ *
+ * @author Chris Beams
+ * @since 3.1
+ */
+public interface PropertySources extends Iterable<PropertySource<?>> {
+
+    /**
+     * Return whether a property source with the given name is contained.
+     *
+     * @param name the {@linkplain PropertySource#getName() name of the property source} to find
+     */
+    boolean contains(String name);
+
+    /**
+     * Return the property source with the given name, {@code null} if not found.
+     *
+     * @param name the {@linkplain PropertySource#getName() name of the property source} to find
+     */
+    PropertySource<?> get(String name);
+
+}

+ 57 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/Resource.java

@@ -0,0 +1,57 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URL;
+
+import org.springframework.core.io.*;
+import org.springframework.core.io.ByteArrayResource;
+
+/**
+ * Interface for a resource descriptor that abstracts from the actual type of
+ * underlying resource, such as a file or class path resource.
+ * <p>
+ * An InputStream can be opened for every resource if it exists in physical
+ * form, but a URL or File handle can just be returned for certain resources.
+ * The actual behavior is implementation-specific.
+ *
+ * @author Juergen Hoeller
+ * @since 28.12.2003
+ * @see #getInputStream()
+ * @see #getURL()
+ * @see #getURI()
+ * @see #getFile()
+ * @see WritableResource
+ * @see ContextResource
+ * @see UrlResource
+ * @see ClassPathResource
+ * @see FileSystemResource
+ * @see PathResource
+ * @see ByteArrayResource
+ * @see InputStreamResource
+ */
+public interface Resource extends InputStreamSource {
+
+    boolean exists();
+
+    boolean isReadable();
+
+    boolean isOpen();
+
+    URL getURL() throws IOException;
+
+    URI getURI() throws IOException;
+
+    File getFile() throws IOException;
+
+    long contentLength() throws IOException;
+
+    long lastModified() throws IOException;
+
+    org.springframework.core.io.Resource createRelative(String var1) throws IOException;
+
+    String getFilename();
+
+    String getDescription();
+}

+ 182 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/SpringProfileDocumentMatcher.java

@@ -0,0 +1,182 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import java.util.*;
+
+import org.springframework.core.env.Environment;
+import org.springframework.util.CollectionUtils;
+import org.springframework.util.StringUtils;
+
+/**
+ * {@link YamlProcessor.DocumentMatcher} backed by
+ * {@link Environment#getActiveProfiles()}. A YAML document may define a
+ * "spring.profiles" element as a comma-separated list of Spring profile names,
+ * optionally negated using the {@code !} character. If both negated and
+ * non-negated profiles are specified for a single document, at least one
+ * non-negated profile must match and no negated profiles may match.
+ *
+ * @author Dave Syer
+ * @author Matt Benson
+ * @author Phillip Webb
+ * @author Andy Wilkinson
+ */
+public class SpringProfileDocumentMatcher implements YamlProcessor.DocumentMatcher {
+
+    private String[] activeProfiles = new String[0];
+
+    public SpringProfileDocumentMatcher(){
+    }
+
+    public SpringProfileDocumentMatcher(String... profiles){
+        addActiveProfiles(profiles);
+    }
+
+    public void addActiveProfiles(String... profiles) {
+        LinkedHashSet<String> set = new LinkedHashSet<String>(Arrays.asList(this.activeProfiles));
+        Collections.addAll(set, profiles);
+        this.activeProfiles = set.toArray(new String[set.size()]);
+    }
+
+    @Override
+    public YamlProcessor.MatchStatus matches(Properties properties) {
+        List<String> profiles = extractSpringProfiles(properties);
+        ProfilesMatcher profilesMatcher = getProfilesMatcher();
+        Set<String> negative = extractProfiles(profiles, ProfileType.NEGATIVE);
+        Set<String> positive = extractProfiles(profiles, ProfileType.POSITIVE);
+        if (!CollectionUtils.isEmpty(negative)) {
+            if (profilesMatcher.matches(negative) == YamlProcessor.MatchStatus.FOUND) {
+                return YamlProcessor.MatchStatus.NOT_FOUND;
+            }
+            if (CollectionUtils.isEmpty(positive)) {
+                return YamlProcessor.MatchStatus.FOUND;
+            }
+        }
+        return profilesMatcher.matches(positive);
+    }
+
+    private List<String> extractSpringProfiles(Properties properties) {
+        SpringProperties springProperties = new SpringProperties();
+        MutablePropertySources propertySources = new MutablePropertySources();
+        propertySources.addFirst(new PropertiesPropertySource("profiles", properties));
+        // PropertyValues propertyValues = new PropertySourcesPropertyValues(
+        // propertySources);
+        // new RelaxedDataBinder(springProperties, "spring").bind(propertyValues);
+        // TODO
+        List<String> profiles = springProperties.getProfiles();
+        return profiles;
+    }
+
+    private ProfilesMatcher getProfilesMatcher() {
+        return (this.activeProfiles.length != 0 ? new ActiveProfilesMatcher(
+            new HashSet<String>(Arrays.asList(this.activeProfiles))) : new EmptyProfilesMatcher());
+    }
+
+    private Set<String> extractProfiles(List<String> profiles, ProfileType type) {
+        if (CollectionUtils.isEmpty(profiles)) {
+            return null;
+        }
+        Set<String> extractedProfiles = new HashSet<String>();
+        for (String candidate : profiles) {
+            ProfileType candidateType = ProfileType.POSITIVE;
+            if (candidate.startsWith("!")) {
+                candidateType = ProfileType.NEGATIVE;
+            }
+            if (candidateType == type) {
+                extractedProfiles.add(type != ProfileType.POSITIVE ? candidate.substring(1) : candidate);
+            }
+        }
+        return extractedProfiles;
+    }
+
+    /**
+     * Profile match types.
+     */
+    enum ProfileType {
+
+                      POSITIVE, NEGATIVE
+
+    }
+
+    /**
+     * Base class for profile matchers.
+     */
+    private abstract static class ProfilesMatcher {
+
+        public final YamlProcessor.MatchStatus matches(Set<String> profiles) {
+            if (CollectionUtils.isEmpty(profiles)) {
+                return YamlProcessor.MatchStatus.ABSTAIN;
+            }
+            return doMatches(profiles);
+        }
+
+        protected abstract YamlProcessor.MatchStatus doMatches(Set<String> profiles);
+
+    }
+
+    /**
+     * {@link ProfilesMatcher} that matches when a value in {@code spring.profiles}
+     * is also in {@code spring.profiles.active}.
+     */
+    private static class ActiveProfilesMatcher extends ProfilesMatcher {
+
+        private final Set<String> activeProfiles;
+
+        ActiveProfilesMatcher(Set<String> activeProfiles){
+            this.activeProfiles = activeProfiles;
+        }
+
+        @Override
+        protected YamlProcessor.MatchStatus doMatches(Set<String> profiles) {
+            if (profiles.isEmpty()) {
+                return YamlProcessor.MatchStatus.NOT_FOUND;
+            }
+            for (String activeProfile : this.activeProfiles) {
+                if (profiles.contains(activeProfile)) {
+                    return YamlProcessor.MatchStatus.FOUND;
+                }
+            }
+            return YamlProcessor.MatchStatus.NOT_FOUND;
+        }
+
+    }
+
+    /**
+     * {@link ProfilesMatcher} that matches when {@code
+     * spring.profiles} is empty or contains a value with no text.
+     *
+     * @see StringUtils#hasText(String)
+     */
+    private static class EmptyProfilesMatcher extends ProfilesMatcher {
+
+        @Override
+        public YamlProcessor.MatchStatus doMatches(Set<String> springProfiles) {
+            if (springProfiles.isEmpty()) {
+                return YamlProcessor.MatchStatus.FOUND;
+            }
+            for (String profile : springProfiles) {
+                if (!StringUtils.hasText(profile)) {
+                    return YamlProcessor.MatchStatus.FOUND;
+                }
+            }
+            return YamlProcessor.MatchStatus.NOT_FOUND;
+        }
+
+    }
+
+    /**
+     * Class for binding {@code spring.profiles} property.
+     */
+    static class SpringProperties {
+
+        private List<String> profiles = new ArrayList<String>();
+
+        public List<String> getProfiles() {
+            return this.profiles;
+        }
+
+        public void setProfiles(List<String> profiles) {
+            this.profiles = profiles;
+        }
+
+    }
+
+}

+ 419 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/YamlProcessor.java

@@ -0,0 +1,419 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.*;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.springframework.util.Assert;
+import org.yaml.snakeyaml.Yaml;
+import org.yaml.snakeyaml.constructor.Constructor;
+import org.yaml.snakeyaml.nodes.MappingNode;
+import org.yaml.snakeyaml.parser.ParserException;
+import org.yaml.snakeyaml.reader.UnicodeReader;
+
/**
 * Base class for YAML factories.
 *
 * @author Dave Syer
 * @author Juergen Hoeller
 * @since 4.1
 */
public abstract class YamlProcessor {

    private final Log             logger           = LogFactory.getLog(getClass());

    private ResolutionMethod      resolutionMethod = ResolutionMethod.OVERRIDE;

    private Resource[]            resources        = new Resource[0];

    private List<DocumentMatcher> documentMatchers = Collections.emptyList();

    // When true, a document on which every matcher abstains still matches.
    private boolean               matchDefault     = true;

    /**
     * A map of document matchers allowing callers to selectively use only some of
     * the documents in a YAML resource. In YAML documents are separated by
     * <code>---</code> lines, and each document is converted to properties before
     * the match is made. E.g.
     *
     * <pre class="code">
     * environment: dev
     * url: http://dev.bar.com
     * name: Developer Setup
     * ---
     * environment: prod
     * url:http://foo.bar.com
     * name: My Cool App
     * </pre>
     *
     * when mapped with
     *
     * <pre class="code">
     * setDocumentMatchers(properties -> ("prod"
     *     .equals(properties.getProperty("environment")) ? MatchStatus.FOUND : MatchStatus.NOT_FOUND));
     * </pre>
     *
     * would end up as
     *
     * <pre class="code">
     * environment=prod
     * url=http://foo.bar.com
     * name=My Cool App
     * </pre>
     */
    public void setDocumentMatchers(DocumentMatcher... matchers) {
        this.documentMatchers = Arrays.asList(matchers);
    }

    /**
     * Flag indicating that a document for which all the
     * {@link #setDocumentMatchers(DocumentMatcher...) document matchers} abstain
     * will nevertheless match. Default is {@code true}.
     */
    public void setMatchDefault(boolean matchDefault) {
        this.matchDefault = matchDefault;
    }

    /**
     * Method to use for resolving resources. Each resource will be converted to a
     * Map, so this property is used to decide which map entries to keep in the
     * final output from this factory. Default is {@link ResolutionMethod#OVERRIDE}.
     */
    public void setResolutionMethod(ResolutionMethod resolutionMethod) {
        Assert.notNull(resolutionMethod, "ResolutionMethod must not be null");
        this.resolutionMethod = resolutionMethod;
    }

    /**
     * Set locations of YAML {@link Resource resources} to be loaded.
     *
     * @see ResolutionMethod
     */
    public void setResources(Resource... resources) {
        this.resources = resources;
    }

    /**
     * Provide an opportunity for subclasses to process the Yaml parsed from the
     * supplied resources. Each resource is parsed in turn and the documents inside
     * checked against the {@link #setDocumentMatchers(DocumentMatcher...)
     * matchers}. If a document matches it is passed into the callback, along with
     * its representation as Properties. Depending on the
     * {@link #setResolutionMethod(ResolutionMethod)} not all of the documents will
     * be parsed.
     *
     * @param callback a callback to delegate to once matching documents are found
     * @see #createYaml()
     */
    protected void process(MatchCallback callback) {
        Yaml yaml = createYaml();
        for (Resource resource : this.resources) {
            boolean found = process(callback, yaml, resource);
            if (this.resolutionMethod == ResolutionMethod.FIRST_FOUND && found) {
                return;
            }
        }
    }

    /**
     * Create the {@link Yaml} instance to use.
     */
    protected Yaml createYaml() {
        return new Yaml(new StrictMapAppenderConstructor());
    }

    /**
     * Parse a single resource and feed each matching document to the callback.
     * Returns whether at least one document matched.
     */
    private boolean process(MatchCallback callback, Yaml yaml, Resource resource) {
        int count = 0;
        try {
            if (logger.isDebugEnabled()) {
                logger.debug("Loading from YAML: " + resource);
            }
            // UnicodeReader handles a possible byte-order mark in the stream.
            Reader reader = new UnicodeReader(resource.getInputStream());
            try {
                for (Object object : yaml.loadAll(reader)) {
                    if (object != null && process(asMap(object), callback)) {
                        count++;
                        if (this.resolutionMethod == ResolutionMethod.FIRST_FOUND) {
                            break;
                        }
                    }
                }
                if (logger.isDebugEnabled()) {
                    logger.debug(
                        "Loaded " + count + " document" + (count > 1 ? "s" : "") + " from YAML resource: " + resource);
                }
            } finally {
                reader.close();
            }
        } catch (IOException ex) {
            handleProcessError(resource, ex);
        }
        return (count > 0);
    }

    // Rethrow unless the resolution method tolerates missing/unreadable resources.
    private void handleProcessError(Resource resource, IOException ex) {
        if (this.resolutionMethod != ResolutionMethod.FIRST_FOUND
            && this.resolutionMethod != ResolutionMethod.OVERRIDE_AND_IGNORE) {
            throw new IllegalStateException(ex);
        }
        if (logger.isWarnEnabled()) {
            logger.warn("Could not load map from " + resource + ": " + ex.getMessage());
        }
    }

    @SuppressWarnings("unchecked")
    private Map<String, Object> asMap(Object object) {
        // YAML can have numbers as keys
        Map<String, Object> result = new LinkedHashMap<String, Object>();
        if (!(object instanceof Map)) {
            // A document can be a text literal
            result.put("document", object);
            return result;
        }

        Map<Object, Object> map = (Map<Object, Object>) object;
        for (Map.Entry<Object, Object> entry : map.entrySet()) {
            Object value = entry.getValue();
            if (value instanceof Map) {
                value = asMap(value);
            }
            Object key = entry.getKey();
            if (key instanceof CharSequence) {
                result.put(key.toString(), value);
            } else {
                // It has to be a map key in this case
                result.put("[" + key.toString() + "]", value);
            }
        }
        return result;
    }

    /**
     * Run the document matchers against one parsed document, invoking the
     * callback when it matches; returns whether the callback was invoked.
     */
    private boolean process(Map<String, Object> map, MatchCallback callback) {
        // Anonymous subclass so getProperty() also renders non-String values
        // (YAML parsing can produce numbers, booleans, etc.).
        Properties properties = new Properties() {

            @Override
            public String getProperty(String key) {
                Object value = get(key);
                return (value != null ? value.toString() : null);
            }
        };
        properties.putAll(getFlattenedMap(map));

        if (this.documentMatchers.isEmpty()) {
            if (logger.isDebugEnabled()) {
                logger.debug("Merging document (no matchers set): " + map);
            }
            callback.process(properties, map);
            return true;
        }

        MatchStatus result = MatchStatus.ABSTAIN;
        for (DocumentMatcher matcher : this.documentMatchers) {
            MatchStatus match = matcher.matches(properties);
            result = MatchStatus.getMostSpecific(match, result);
            if (match == MatchStatus.FOUND) {
                if (logger.isDebugEnabled()) {
                    logger.debug("Matched document with document matcher: " + properties);
                }
                callback.process(properties, map);
                return true;
            }
        }

        if (result == MatchStatus.ABSTAIN && this.matchDefault) {
            if (logger.isDebugEnabled()) {
                logger.debug("Matched document with default matcher: " + map);
            }
            callback.process(properties, map);
            return true;
        }

        if (logger.isDebugEnabled()) {
            logger.debug("Unmatched document: " + map);
        }
        return false;
    }

    /**
     * Return a flattened version of the given map, recursively following any nested
     * Map or Collection values. Entries from the resulting map retain the same
     * order as the source. When called with the Map from a {@link MatchCallback}
     * the result will contain the same values as the {@link MatchCallback}
     * Properties.
     *
     * @param source the source map
     * @return a flattened map
     * @since 4.1.3
     */
    protected final Map<String, Object> getFlattenedMap(Map<String, Object> source) {
        Map<String, Object> result = new LinkedHashMap<String, Object>();
        buildFlattenedMap(result, source, null);
        return result;
    }

    // True if str has at least one non-whitespace character.
    private static boolean containsText(CharSequence str) {
        int strLen = str.length();
        for (int i = 0; i < strLen; i++) {
            if (!Character.isWhitespace(str.charAt(i))) {
                return true;
            }
        }
        return false;
    }

    private void buildFlattenedMap(Map<String, Object> result, Map<String, Object> source, String path) {
        for (Map.Entry<String, Object> entry : source.entrySet()) {
            String key = entry.getKey();
            // Prefix nested keys with the parent path ("a.b" or "a[0]").
            // Note: containsText(path) already implies !path.isEmpty().
            if (path != null && !path.isEmpty() && containsText(path)) {
                if (key.startsWith("[")) {
                    key = path + key;
                } else {
                    key = path + '.' + key;
                }
            }
            Object value = entry.getValue();
            if (value instanceof String) {
                result.put(key, value);
            } else if (value instanceof Map) {
                // Need a compound key
                @SuppressWarnings("unchecked")
                Map<String, Object> map = (Map<String, Object>) value;
                buildFlattenedMap(result, map, key);
            } else if (value instanceof Collection) {
                // Need a compound key
                @SuppressWarnings("unchecked")
                Collection<Object> collection = (Collection<Object>) value;
                int count = 0;
                for (Object object : collection) {
                    buildFlattenedMap(result, Collections.singletonMap("[" + (count++) + "]", object), key);
                }
            } else {
                result.put(key, (value != null ? value : ""));
            }
        }
    }

    /**
     * Callback interface used to process the YAML parsing results.
     */
    public interface MatchCallback {

        /**
         * Process the given representation of the parsing results.
         *
         * @param properties the properties to process (as a flattened representation
         *     with indexed keys in case of a collection or map)
         * @param map the result map (preserving the original value structure in the
         *     YAML document)
         */
        void process(Properties properties, Map<String, Object> map);
    }

    /**
     * Strategy interface used to test if properties match.
     */
    public interface DocumentMatcher {

        /**
         * Test if the given properties match.
         *
         * @param properties the properties to test
         * @return the status of the match
         */
        MatchStatus matches(Properties properties);
    }

    /**
     * Status returned from {@link DocumentMatcher#matches(Properties)}
     */
    public enum MatchStatus {

                             /**
                              * A match was found.
                              */
                             FOUND,

                             /**
                              * No match was found.
                              */
                             NOT_FOUND,

                             /**
                              * The matcher should not be considered.
                              */
                             ABSTAIN;

        /**
         * Compare two {@link MatchStatus} items, returning the most specific status.
         */
        public static MatchStatus getMostSpecific(MatchStatus a, MatchStatus b) {
            // Enum declaration order doubles as specificity order (FOUND most specific).
            return (a.ordinal() < b.ordinal() ? a : b);
        }
    }

    /**
     * Method to use for resolving resources.
     */
    public enum ResolutionMethod {

                                  /**
                                   * Replace values from earlier in the list.
                                   */
                                  OVERRIDE,

                                  /**
                                   * Replace values from earlier in the list, ignoring any failures.
                                   */
                                  OVERRIDE_AND_IGNORE,

                                  /**
                                   * Take the first resource in the list that exists and use just that.
                                   */
                                  FIRST_FOUND
    }

    /**
     * A specialized {@link Constructor} that checks for duplicate keys.
     */
    protected static class StrictMapAppenderConstructor extends Constructor {

        // Declared as public for use in subclasses
        public StrictMapAppenderConstructor(){
            super();
        }

        @Override
        protected Map<Object, Object> constructMapping(MappingNode node) {
            try {
                return super.constructMapping(node);
            } catch (IllegalStateException ex) {
                // Re-throw the duplicate-key failure with source position information.
                throw new ParserException("while parsing MappingNode",
                    node.getStartMark(),
                    ex.getMessage(),
                    node.getEndMark());
            }
        }

        @Override
        protected Map<Object, Object> createDefaultMap() {
            // Wrap the default map so put() rejects duplicate keys.
            final Map<Object, Object> delegate = super.createDefaultMap();
            return new AbstractMap<Object, Object>() {

                @Override
                public Object put(Object key, Object value) {
                    if (delegate.containsKey(key)) {
                        throw new IllegalStateException("Duplicate key: " + key);
                    }
                    return delegate.put(key, value);
                }

                @Override
                public Set<Entry<Object, Object>> entrySet() {
                    return delegate.entrySet();
                }
            };
        }
    }
}

+ 87 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/YamlPropertySourceLoader.java

@@ -0,0 +1,87 @@
+package com.alibaba.otter.canal.client.adapter.config.common;
+
+import java.io.IOException;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Properties;
+import java.util.regex.Pattern;
+
+import org.springframework.util.ClassUtils;
+import org.yaml.snakeyaml.DumperOptions;
+import org.yaml.snakeyaml.Yaml;
+import org.yaml.snakeyaml.nodes.Tag;
+import org.yaml.snakeyaml.representer.Representer;
+import org.yaml.snakeyaml.resolver.Resolver;
+
+/**
+ * Strategy to load '.yml' (or '.yaml') files into a {@link PropertySource}.
+ *
+ * @author Dave Syer
+ * @author Phillip Webb
+ * @author Andy Wilkinson
+ */
+public class YamlPropertySourceLoader implements PropertySourceLoader {
+
+    @Override
+    public String[] getFileExtensions() {
+        return new String[] { "yml", "yaml" };
+    }
+
+    @Override
+    public PropertySource<?> load(String name, Resource resource, String profile) throws IOException {
+        if (ClassUtils.isPresent("org.yaml.snakeyaml.Yaml", null)) {
+            Processor processor = new Processor(resource, profile);
+            Map<String, Object> source = processor.process();
+            if (!source.isEmpty()) {
+                return new MapPropertySource(name, source);
+            }
+        }
+        return null;
+    }
+
+    /**
+     * {@link YamlProcessor} to create a {@link Map} containing the property values.
+     * Similar to {@link YamlPropertiesFactoryBean} but retains the order of
+     * entries.
+     */
+    private static class Processor extends YamlProcessor {
+
+        Processor(Resource resource, String profile){
+            if (profile == null) {
+                setMatchDefault(true);
+                setDocumentMatchers(new SpringProfileDocumentMatcher());
+            } else {
+                setMatchDefault(false);
+                setDocumentMatchers(new SpringProfileDocumentMatcher(profile));
+            }
+            setResources(resource);
+        }
+
+        @Override
+        protected Yaml createYaml() {
+            return new Yaml(new StrictMapAppenderConstructor(), new Representer(), new DumperOptions(), new Resolver() {
+
+                @Override
+                public void addImplicitResolver(Tag tag, Pattern regexp, String first) {
+                    if (tag == Tag.TIMESTAMP) {
+                        return;
+                    }
+                    super.addImplicitResolver(tag, regexp, first);
+                }
+            });
+        }
+
+        public Map<String, Object> process() {
+            final Map<String, Object> result = new LinkedHashMap<String, Object>();
+            process(new MatchCallback() {
+
+                @Override
+                public void process(Properties properties, Map<String, Object> map) {
+                    result.putAll(getFlattenedMap(map));
+                }
+            });
+            return result;
+        }
+
+    }
+}

+ 9 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Dml.java

@@ -15,6 +15,7 @@ public class Dml implements Serializable {
     private static final long         serialVersionUID = 2611556444074013268L;
 
     private String                    destination;                            // 对应canal的实例或者MQ的topic
+    private String                    groupId;                                // 对应mq的group id
     private String                    database;                               // 数据库或schema
     private String                    table;                                  // 表名
     private List<String>              pkNames;
@@ -36,6 +37,14 @@ public class Dml implements Serializable {
         this.destination = destination;
     }
 
+    public String getGroupId() {
+        return groupId;
+    }
+
+    public void setGroupId(String groupId) {
+        this.groupId = groupId;
+    }
+
     public String getDatabase() {
         return database;
     }

+ 15 - 8
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/JdbcTypeUtil.java

@@ -2,7 +2,12 @@ package com.alibaba.otter.canal.client.adapter.support;
 
 import java.math.BigDecimal;
 import java.math.BigInteger;
-import java.sql.*;
+import java.sql.Date;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.sql.Types;
 
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
@@ -67,8 +72,14 @@ public class JdbcTypeUtil {
         }
     }
 
+    private static boolean isText(String columnType) {
+        return "LONGTEXT".equalsIgnoreCase(columnType) || "MEDIUMTEXT".equalsIgnoreCase(columnType)
+               || "TEXT".equalsIgnoreCase(columnType) || "TINYTEXT".equalsIgnoreCase(columnType);
+    }
+
     public static Object typeConvert(String columnName, String value, int sqlType, String mysqlType) {
-        if (value == null || value.equals("")) {
+        if (value == null
+            || (value.equals("") && !(isText(mysqlType) || sqlType == Types.CHAR || sqlType == Types.VARCHAR || sqlType == Types.LONGVARCHAR))) {
             return null;
         }
 
@@ -118,10 +129,8 @@ public class JdbcTypeUtil {
                         value = value.trim().replace(" ", "T");
                         DateTime dt = new DateTime(value);
                         res = new Date(dt.toDate().getTime());
-                    } else {
-                        res = null;
+                        break;
                     }
-                    break;
                 case Types.TIME:
                     value = "T" + value;
                     DateTime dt = new DateTime(value);
@@ -132,10 +141,8 @@ public class JdbcTypeUtil {
                         value = value.trim().replace(" ", "T");
                         dt = new DateTime(value);
                         res = new Timestamp(dt.toDate().getTime());
-                    } else {
-                        res = null;
+                        break;
                     }
-                    break;
                 case Types.CLOB:
                 default:
                     res = value;

+ 8 - 4
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/MessageUtil.java

@@ -14,7 +14,7 @@ import com.alibaba.otter.canal.protocol.Message;
  */
 public class MessageUtil {
 
-    public static List<Dml> parse4Dml(String destination, Message message) {
+    public static List<Dml> parse4Dml(String destination, String groupId, Message message) {
         if (message == null) {
             return null;
         }
@@ -37,11 +37,14 @@ public class MessageUtil {
             CanalEntry.EventType eventType = rowChange.getEventType();
 
             final Dml dml = new Dml();
+            dml.setIsDdl(rowChange.getIsDdl());
             dml.setDestination(destination);
+            dml.setGroupId(groupId);
             dml.setDatabase(entry.getHeader().getSchemaName());
             dml.setTable(entry.getHeader().getTableName());
             dml.setType(eventType.toString());
             dml.setEs(entry.getHeader().getExecuteTime());
+            dml.setIsDdl(rowChange.getIsDdl());
             dml.setTs(System.currentTimeMillis());
             dml.setSql(rowChange.getSql());
             dmls.add(dml);
@@ -118,10 +121,10 @@ public class MessageUtil {
         return dmls;
     }
 
-    public static List<Dml> flatMessage2Dml(String destination, List<FlatMessage> flatMessages) {
+    public static List<Dml> flatMessage2Dml(String destination, String groupId, List<FlatMessage> flatMessages) {
         List<Dml> dmls = new ArrayList<Dml>(flatMessages.size());
         for (FlatMessage flatMessage : flatMessages) {
-            Dml dml = flatMessage2Dml(destination, flatMessage);
+            Dml dml = flatMessage2Dml(destination, groupId, flatMessage);
             if (dml != null) {
                 dmls.add(dml);
             }
@@ -130,12 +133,13 @@ public class MessageUtil {
         return dmls;
     }
 
-    public static Dml flatMessage2Dml(String destination, FlatMessage flatMessage) {
+    public static Dml flatMessage2Dml(String destination, String groupId, FlatMessage flatMessage) {
         if (flatMessage == null) {
             return null;
         }
         Dml dml = new Dml();
         dml.setDestination(destination);
+        dml.setGroupId(groupId);
         dml.setDatabase(flatMessage.getDatabase());
         dml.setTable(flatMessage.getTable());
         dml.setPkNames(flatMessage.getPkNames());

+ 1 - 7
client-adapter/elasticsearch/pom.xml

@@ -18,12 +18,6 @@
             <version>${project.version}</version>
             <scope>provided</scope>
         </dependency>
-        <dependency>
-            <groupId>org.yaml</groupId>
-            <artifactId>snakeyaml</artifactId>
-            <version>1.19</version>
-            <scope>provided</scope>
-        </dependency>
         <dependency>
             <groupId>com.alibaba.fastsql</groupId>
             <artifactId>fastsql</artifactId>
@@ -92,4 +86,4 @@
         </plugins>
     </build>
 
-</project>
+</project>

+ 44 - 20
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/ESAdapter.java

@@ -1,16 +1,17 @@
 package com.alibaba.otter.canal.client.adapter.es;
 
 import java.net.InetAddress;
-import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import javax.sql.DataSource;
 
+import org.apache.commons.lang.StringUtils;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.client.transport.TransportClient;
 import org.elasticsearch.common.settings.Settings;
@@ -28,11 +29,7 @@ import com.alibaba.otter.canal.client.adapter.es.monitor.ESConfigMonitor;
 import com.alibaba.otter.canal.client.adapter.es.service.ESEtlService;
 import com.alibaba.otter.canal.client.adapter.es.service.ESSyncService;
 import com.alibaba.otter.canal.client.adapter.es.support.ESTemplate;
-import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
-import com.alibaba.otter.canal.client.adapter.support.Dml;
-import com.alibaba.otter.canal.client.adapter.support.EtlResult;
-import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig;
-import com.alibaba.otter.canal.client.adapter.support.SPI;
+import com.alibaba.otter.canal.client.adapter.support.*;
 
 /**
  * ES外部适配器
@@ -52,6 +49,8 @@ public class ESAdapter implements OuterAdapter {
 
     private ESConfigMonitor                        esConfigMonitor;
 
+    private Properties                             envProperties;
+
     public TransportClient getTransportClient() {
         return transportClient;
     }
@@ -69,9 +68,10 @@ public class ESAdapter implements OuterAdapter {
     }
 
     @Override
-    public void init(OuterAdapterConfig configuration) {
+    public void init(OuterAdapterConfig configuration, Properties envProperties) {
         try {
-            Map<String, ESSyncConfig> esSyncConfigTmp = ESSyncConfigLoader.load();
+            this.envProperties = envProperties;
+            Map<String, ESSyncConfig> esSyncConfigTmp = ESSyncConfigLoader.load(envProperties);
             // 过滤不匹配的key的配置
             esSyncConfigTmp.forEach((key, config) -> {
                 if ((config.getOuterAdapterKey() == null && configuration.getKey() == null)
@@ -99,8 +99,21 @@ public class ESAdapter implements OuterAdapter {
                 String schema = matcher.group(2);
 
                 schemaItem.getAliasTableItems().values().forEach(tableItem -> {
-                    Map<String, ESSyncConfig> esSyncConfigMap = dbTableEsSyncConfig
-                        .computeIfAbsent(schema + "-" + tableItem.getTableName(), k -> new HashMap<>());
+                    Map<String, ESSyncConfig> esSyncConfigMap;
+                    if (envProperties != null
+                        && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) {
+                        esSyncConfigMap = dbTableEsSyncConfig
+                            .computeIfAbsent(StringUtils.trimToEmpty(config.getDestination()) + "-"
+                                             + StringUtils.trimToEmpty(config.getGroupId()) + "_" + schema + "-"
+                                             + tableItem.getTableName(),
+                                k -> new ConcurrentHashMap<>());
+                    } else {
+                        esSyncConfigMap = dbTableEsSyncConfig
+                            .computeIfAbsent(StringUtils.trimToEmpty(config.getDestination()) + "_" + schema + "-"
+                                             + tableItem.getTableName(),
+                                k -> new ConcurrentHashMap<>());
+                    }
+
                     esSyncConfigMap.put(configName, config);
                 });
             }
@@ -131,15 +144,27 @@ public class ESAdapter implements OuterAdapter {
             return;
         }
         for (Dml dml : dmls) {
-            sync(dml);
+            if (!dml.getIsDdl()) {
+                sync(dml);
+            }
         }
+        esSyncService.commit(); // 批次统一提交
     }
 
-    public void sync(Dml dml) {
+    private void sync(Dml dml) {
         String database = dml.getDatabase();
         String table = dml.getTable();
-        Map<String, ESSyncConfig> configMap = dbTableEsSyncConfig.get(database + "-" + table);
-        if (configMap != null) {
+        Map<String, ESSyncConfig> configMap;
+        if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) {
+            configMap = dbTableEsSyncConfig
+                .get(StringUtils.trimToEmpty(dml.getDestination()) + "-" + StringUtils.trimToEmpty(dml.getGroupId())
+                     + "_" + database + "-" + table);
+        } else {
+            configMap = dbTableEsSyncConfig
+                .get(StringUtils.trimToEmpty(dml.getDestination()) + "_" + database + "-" + table);
+        }
+
+        if (configMap != null && !configMap.values().isEmpty()) {
             esSyncService.sync(configMap.values(), dml);
         }
     }
@@ -152,7 +177,7 @@ public class ESAdapter implements OuterAdapter {
             DataSource dataSource = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
             ESEtlService esEtlService = new ESEtlService(transportClient, config);
             if (dataSource != null) {
-                return esEtlService.importData(params, false);
+                return esEtlService.importData(params);
             } else {
                 etlResult.setSucceeded(false);
                 etlResult.setErrorMessage("DataSource not found");
@@ -166,7 +191,7 @@ public class ESAdapter implements OuterAdapter {
                 // 取所有的destination为task的配置
                 if (configTmp.getDestination().equals(task)) {
                     ESEtlService esEtlService = new ESEtlService(transportClient, configTmp);
-                    EtlResult etlRes = esEtlService.importData(params, false);
+                    EtlResult etlRes = esEtlService.importData(params);
                     if (!etlRes.getSucceeded()) {
                         resSuccess = false;
                         resultMsg.append(etlRes.getErrorMessage()).append("\n");
@@ -208,6 +233,9 @@ public class ESAdapter implements OuterAdapter {
 
     @Override
     public void destroy() {
+        if (esConfigMonitor != null) {
+            esConfigMonitor.destroy();
+        }
         if (transportClient != null) {
             transportClient.close();
         }
@@ -215,10 +243,6 @@ public class ESAdapter implements OuterAdapter {
 
     @Override
     public String getDestination(String task) {
-        if (esConfigMonitor != null) {
-            esConfigMonitor.destroy();
-        }
-
         ESSyncConfig config = esSyncConfig.get(task);
         if (config != null) {
             return config.getDestination();

+ 21 - 10
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/ESSyncConfig.java

@@ -17,6 +17,8 @@ public class ESSyncConfig {
 
     private String    outerAdapterKey; // adapter key
 
+    private String    groupId;         // group id
+
     private String    destination;     // canal destination
 
     private ESMapping esMapping;
@@ -28,7 +30,7 @@ public class ESSyncConfig {
         if (esMapping._type == null) {
             throw new NullPointerException("esMapping._type");
         }
-        if (esMapping._id == null && esMapping.pk == null) {
+        if (esMapping._id == null && esMapping.getPk() == null) {
             throw new NullPointerException("esMapping._id and esMapping.pk");
         }
         if (esMapping.sql == null) {
@@ -52,6 +54,14 @@ public class ESSyncConfig {
         this.outerAdapterKey = outerAdapterKey;
     }
 
+    public String getGroupId() {
+        return groupId;
+    }
+
+    public void setGroupId(String groupId) {
+        this.groupId = groupId;
+    }
+
     public String getDestination() {
         return destination;
     }
@@ -73,8 +83,9 @@ public class ESSyncConfig {
         private String              _index;
         private String              _type;
         private String              _id;
+        private boolean             upsert          = false;
         private String              pk;
-        private String              parent;
+        // private String parent;
         private String              sql;
         // 对象字段, 例: objFields:
         // - _labels: array:;
@@ -111,20 +122,20 @@ public class ESSyncConfig {
             this._id = _id;
         }
 
-        public String getPk() {
-            return pk;
+        public boolean isUpsert() {
+            return upsert;
         }
 
-        public void setPk(String pk) {
-            this.pk = pk;
+        public void setUpsert(boolean upsert) {
+            this.upsert = upsert;
         }
 
-        public String getParent() {
-            return parent;
+        public String getPk() {
+            return pk;
         }
 
-        public void setParent(String parent) {
-            this.parent = parent;
+        public void setPk(String pk) {
+            this.pk = pk;
         }
 
         public Map<String, String> getObjFields() {

+ 7 - 7
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/ESSyncConfigLoader.java

@@ -2,12 +2,12 @@ package com.alibaba.otter.canal.client.adapter.es.config;
 
 import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.Properties;
 
-import com.alibaba.fastjson.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
 
+import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder;
 import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
 
 /**
@@ -20,17 +20,17 @@ public class ESSyncConfigLoader {
 
     private static Logger logger = LoggerFactory.getLogger(ESSyncConfigLoader.class);
 
-    @SuppressWarnings("unchecked")
-    public static synchronized Map<String, ESSyncConfig> load() {
+    public static synchronized Map<String, ESSyncConfig> load(Properties envProperties) {
         logger.info("## Start loading es mapping config ... ");
 
         Map<String, ESSyncConfig> esSyncConfig = new LinkedHashMap<>();
 
         Map<String, String> configContentMap = MappingConfigsLoader.loadConfigs("es");
         configContentMap.forEach((fileName, content) -> {
-            Map configMap = new Yaml().loadAs(content, Map.class); // yml自带的对象反射不是很稳定
-            JSONObject configJson = new JSONObject(configMap);
-            ESSyncConfig config = configJson.toJavaObject(ESSyncConfig.class);
+            ESSyncConfig config = YmlConfigBinder.bindYmlToObj(null, content, ESSyncConfig.class, null, envProperties);
+            if (config == null) {
+                return;
+            }
             try {
                 config.validate();
             } catch (Exception e) {

+ 15 - 1
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/SchemaItem.java

@@ -57,6 +57,11 @@ public class SchemaItem {
         this.selectFields = selectFields;
     }
 
+    public String toSql() {
+        // todo
+        return null;
+    }
+
     public Map<String, List<TableItem>> getTableItemAliases() {
         if (tableItemAliases == null) {
             synchronized (SchemaItem.class) {
@@ -80,7 +85,7 @@ public class SchemaItem {
                     columnFields = new LinkedHashMap<>();
                     getSelectFields()
                         .forEach((fieldName, fieldItem) -> fieldItem.getColumnItems().forEach(columnItem -> {
-                            TableItem tableItem = getAliasTableItems().get(columnItem.getOwner());
+                            // TableItem tableItem = getAliasTableItems().get(columnItem.getOwner());
                             // if (!tableItem.isSubQuery()) {
                             List<FieldItem> fieldItems = columnFields.computeIfAbsent(
                                 columnItem.getOwner() + "." + columnItem.getColumnName(),
@@ -316,6 +321,7 @@ public class SchemaItem {
     public static class FieldItem {
 
         private String           fieldName;
+        private String           expr;
         private List<ColumnItem> columnItems = new ArrayList<>();
         private List<String>     owners      = new ArrayList<>();
 
@@ -330,6 +336,14 @@ public class SchemaItem {
             this.fieldName = fieldName;
         }
 
+        public String getExpr() {
+            return expr;
+        }
+
+        public void setExpr(String expr) {
+            this.expr = expr;
+        }
+
         public List<ColumnItem> getColumnItems() {
             return columnItems;
         }

+ 9 - 6
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/SqlParser.java

@@ -25,7 +25,7 @@ import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.TableItem;
 
 /**
  * ES同步指定sql格式解析
- * 
+ *
  * @author rewerma 2018-10-26 下午03:45:49
  * @version 1.0.0
  */
@@ -33,7 +33,7 @@ public class SqlParser {
 
     /**
      * 解析sql
-     * 
+     *
      * @param sql sql
      * @return 视图对象
      */
@@ -66,7 +66,7 @@ public class SqlParser {
 
     /**
      * 归集字段
-     * 
+     *
      * @param sqlSelectQueryBlock sqlSelectQueryBlock
      * @return 字段属性列表
      */
@@ -74,6 +74,7 @@ public class SqlParser {
         return sqlSelectQueryBlock.getSelectList().stream().map(selectItem -> {
             FieldItem fieldItem = new FieldItem();
             fieldItem.setFieldName(selectItem.getAlias());
+            fieldItem.setExpr(selectItem.toString());
             visitColumn(selectItem.getExpr(), fieldItem);
             return fieldItem;
         }).collect(Collectors.toList());
@@ -81,7 +82,7 @@ public class SqlParser {
 
     /**
      * 解析字段
-     * 
+     *
      * @param expr sql expr
      * @param fieldItem 字段属性
      */
@@ -91,6 +92,7 @@ public class SqlParser {
             SQLIdentifierExpr identifierExpr = (SQLIdentifierExpr) expr;
             if (fieldItem.getFieldName() == null) {
                 fieldItem.setFieldName(identifierExpr.getName());
+                fieldItem.setExpr(identifierExpr.toString());
             }
             ColumnItem columnItem = new ColumnItem();
             columnItem.setColumnName(identifierExpr.getName());
@@ -101,6 +103,7 @@ public class SqlParser {
             SQLPropertyExpr sqlPropertyExpr = (SQLPropertyExpr) expr;
             if (fieldItem.getFieldName() == null) {
                 fieldItem.setFieldName(sqlPropertyExpr.getName());
+                fieldItem.setExpr(sqlPropertyExpr.toString());
             }
             fieldItem.getOwners().add(sqlPropertyExpr.getOwnernName());
             ColumnItem columnItem = new ColumnItem();
@@ -123,7 +126,7 @@ public class SqlParser {
 
     /**
      * 解析表
-     * 
+     *
      * @param schemaItem 视图对象
      * @param sqlTableSource sqlTableSource
      * @param tableItems 表对象列表
@@ -178,7 +181,7 @@ public class SqlParser {
 
     /**
      * 解析on条件
-     * 
+     *
      * @param expr sql expr
      * @param tableItem 表对象
      */

+ 66 - 70
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/service/ESEtlService.java

@@ -1,6 +1,9 @@
 package com.alibaba.otter.canal.client.adapter.es.service;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
@@ -14,7 +17,9 @@ import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.client.transport.TransportClient;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
@@ -51,7 +56,7 @@ public class ESEtlService {
         this.config = config;
     }
 
-    public EtlResult importData(List<String> params, boolean bulk) {
+    public EtlResult importData(List<String> params) {
         EtlResult etlResult = new EtlResult();
         AtomicLong impCount = new AtomicLong();
         List<String> errMsg = new ArrayList<>();
@@ -93,54 +98,49 @@ public class ESEtlService {
                 logger.debug("etl sql : {}", mapping.getSql());
             }
 
-            if (bulk) {
-                // 获取总数
-                String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
-                long cnt = (Long) ESSyncUtil.sqlRS(dataSource, countSql, rs -> {
-                    Long count = null;
-                    try {
-                        if (rs.next()) {
-                            count = ((Number) rs.getObject(1)).longValue();
-                        }
-                    } catch (Exception e) {
-                        logger.error(e.getMessage(), e);
-                    }
-                    return count == null ? 0L : count;
-                });
-
-                // 当大于1万条记录时开启多线程
-                if (cnt >= 10000) {
-                    int threadCount = 3; // TODO 从配置读取默认为3
-                    long perThreadCnt = cnt / threadCount;
-                    ExecutorService executor = Executors.newFixedThreadPool(threadCount);
-                    List<Future<Boolean>> futures = new ArrayList<>(threadCount);
-                    for (int i = 0; i < threadCount; i++) {
-                        long offset = i * perThreadCnt;
-                        Long size = null;
-                        if (i != threadCount - 1) {
-                            size = perThreadCnt;
-                        }
-                        String sqlFinal;
-                        if (size != null) {
-                            sqlFinal = sql + " LIMIT " + offset + "," + size;
-                        } else {
-                            sqlFinal = sql + " LIMIT " + offset + "," + cnt;
-                        }
-                        Future<Boolean> future = executor
-                            .submit(() -> executeSqlImport(dataSource, sqlFinal, mapping, impCount, errMsg));
-                        futures.add(future);
+            // 获取总数
+            String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
+            long cnt = (Long) ESSyncUtil.sqlRS(dataSource, countSql, rs -> {
+                Long count = null;
+                try {
+                    if (rs.next()) {
+                        count = ((Number) rs.getObject(1)).longValue();
                     }
+                } catch (Exception e) {
+                    logger.error(e.getMessage(), e);
+                }
+                return count == null ? 0L : count;
+            });
 
-                    for (Future<Boolean> future : futures) {
-                        future.get();
+            // 当大于1万条记录时开启多线程
+            if (cnt >= 10000) {
+                int threadCount = 3; // 从配置读取默认为3
+                long perThreadCnt = cnt / threadCount;
+                ExecutorService executor = Executors.newFixedThreadPool(threadCount);
+                List<Future<Boolean>> futures = new ArrayList<>(threadCount);
+                for (int i = 0; i < threadCount; i++) {
+                    long offset = i * perThreadCnt;
+                    Long size = null;
+                    if (i != threadCount - 1) {
+                        size = perThreadCnt;
+                    }
+                    String sqlFinal;
+                    if (size != null) {
+                        sqlFinal = sql + " LIMIT " + offset + "," + size;
+                    } else {
+                        sqlFinal = sql + " LIMIT " + offset + "," + cnt;
                     }
+                    Future<Boolean> future = executor
+                        .submit(() -> executeSqlImport(dataSource, sqlFinal, mapping, impCount, errMsg));
+                    futures.add(future);
+                }
 
-                    executor.shutdown();
-                } else {
-                    executeSqlImport(dataSource, sql, mapping, impCount, errMsg);
+                for (Future<Boolean> future : futures) {
+                    future.get();
                 }
+
+                executor.shutdown();
             } else {
-                logger.info("自动ETL,无需统计记录总条数,直接进行ETL, index: {}", esIndex);
                 executeSqlImport(dataSource, sql, mapping, impCount, errMsg);
             }
 
@@ -158,7 +158,7 @@ public class ESEtlService {
         return etlResult;
     }
 
-    private void processFailBulkResponse(BulkResponse bulkResponse, boolean hasParent) {
+    private void processFailBulkResponse(BulkResponse bulkResponse) {
         for (BulkItemResponse response : bulkResponse.getItems()) {
             if (!response.isFailed()) {
                 continue;
@@ -205,31 +205,27 @@ public class ESEtlService {
                         }
 
                         if (idVal != null) {
-                            if (mapping.getParent() == null) {
+                            if (mapping.isUpsert()) {
+                                bulkRequestBuilder.add(transportClient
+                                    .prepareUpdate(mapping.get_index(), mapping.get_type(), idVal.toString())
+                                    .setDoc(esFieldData)
+                                    .setDocAsUpsert(true));
+                            } else {
                                 bulkRequestBuilder.add(transportClient
                                     .prepareIndex(mapping.get_index(), mapping.get_type(), idVal.toString())
                                     .setSource(esFieldData));
-                            } else {
-                                // ignore
                             }
                         } else {
                             idVal = rs.getObject(mapping.getPk());
-                            if (mapping.getParent() == null) {
-                                // 删除pk对应的数据
-                                SearchResponse response = transportClient.prepareSearch(mapping.get_index())
-                                    .setTypes(mapping.get_type())
-                                    .setQuery(QueryBuilders.termQuery(mapping.getPk(), idVal))
-                                    .get();
-                                for (SearchHit hit : response.getHits()) {
-                                    bulkRequestBuilder.add(transportClient
-                                        .prepareDelete(mapping.get_index(), mapping.get_type(), hit.getId()));
-                                }
-
-                                bulkRequestBuilder
-                                    .add(transportClient.prepareIndex(mapping.get_index(), mapping.get_type())
-                                        .setSource(esFieldData));
-                            } else {
-                                // ignore
+                            SearchResponse response = transportClient.prepareSearch(mapping.get_index())
+                                .setTypes(mapping.get_type())
+                                .setQuery(QueryBuilders.termQuery(mapping.getPk(), idVal))
+                                .setSize(10000)
+                                .get();
+                            for (SearchHit hit : response.getHits()) {
+                                bulkRequestBuilder.add(
+                                    transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), hit.getId())
+                                        .setDoc(esFieldData));
                             }
                         }
 
@@ -238,11 +234,11 @@ public class ESEtlService {
                             long esBatchBegin = System.currentTimeMillis();
                             BulkResponse rp = bulkRequestBuilder.execute().actionGet();
                             if (rp.hasFailures()) {
-                                this.processFailBulkResponse(rp, Objects.nonNull(mapping.getParent()));
+                                this.processFailBulkResponse(rp);
                             }
 
-                            if (logger.isDebugEnabled()) {
-                                logger.debug("全量数据批量导入批次耗时: {}, es执行时间: {}, 批次大小: {}, index; {}",
+                            if (logger.isTraceEnabled()) {
+                                logger.trace("全量数据批量导入批次耗时: {}, es执行时间: {}, 批次大小: {}, index; {}",
                                     (System.currentTimeMillis() - batchBegin),
                                     (System.currentTimeMillis() - esBatchBegin),
                                     bulkRequestBuilder.numberOfActions(),
@@ -259,10 +255,10 @@ public class ESEtlService {
                         long esBatchBegin = System.currentTimeMillis();
                         BulkResponse rp = bulkRequestBuilder.execute().actionGet();
                         if (rp.hasFailures()) {
-                            this.processFailBulkResponse(rp, Objects.nonNull(mapping.getParent()));
+                            this.processFailBulkResponse(rp);
                         }
-                        if (logger.isDebugEnabled()) {
-                            logger.debug("全量数据批量导入最后批次耗时: {}, es执行时间: {}, 批次大小: {}, index; {}",
+                        if (logger.isTraceEnabled()) {
+                            logger.trace("全量数据批量导入最后批次耗时: {}, es执行时间: {}, 批次大小: {}, index; {}",
                                 (System.currentTimeMillis() - batchBegin),
                                 (System.currentTimeMillis() - esBatchBegin),
                                 bulkRequestBuilder.numberOfActions(),

+ 73 - 91
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/service/ESSyncService.java

@@ -71,7 +71,12 @@ public class ESSyncService {
                     dml.getDestination());
             }
             if (logger.isDebugEnabled()) {
-                logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue));
+                StringBuilder configIndexes = new StringBuilder();
+                esSyncConfigs
+                    .forEach(esSyncConfig -> configIndexes.append(esSyncConfig.getEsMapping().get_index()).append(" "));
+                logger.debug("DML: {} \nEffect indexes: {}",
+                    JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue),
+                    configIndexes.toString());
             }
         }
     }
@@ -92,6 +97,8 @@ public class ESSyncService {
                 update(config, dml);
             } else if (type != null && type.equalsIgnoreCase("DELETE")) {
                 delete(config, dml);
+            } else {
+                return;
             }
 
             if (logger.isTraceEnabled()) {
@@ -329,33 +336,50 @@ public class ESSyncService {
 
             // ------是主表------
             if (schemaItem.getMainTable().getTableName().equalsIgnoreCase(dml.getTable())) {
-                FieldItem idFieldItem = schemaItem.getIdFieldItem(mapping);
-                // 主键为简单字段
-                if (!idFieldItem.isMethod() && !idFieldItem.isBinaryOp()) {
-                    Object idVal = esTemplate.getValFromData(mapping,
-                        data,
-                        idFieldItem.getFieldName(),
-                        idFieldItem.getColumn().getColumnName());
-
-                    if (logger.isTraceEnabled()) {
-                        logger.trace("Main table delete es index, destination:{}, table: {}, index: {}, id: {}",
-                            config.getDestination(),
-                            dml.getTable(),
-                            mapping.get_index(),
-                            idVal);
-                    }
-                    boolean result = esTemplate.delete(mapping, idVal);
-                    if (!result) {
-                        logger.error("Main table delete es index error, destination:{}, table: {}, index: {}, id: {}",
-                            config.getDestination(),
-                            dml.getTable(),
-                            mapping.get_index(),
-                            idVal);
+                if (mapping.get_id() != null) {
+                    FieldItem idFieldItem = schemaItem.getIdFieldItem(mapping);
+                    // 主键为简单字段
+                    if (!idFieldItem.isMethod() && !idFieldItem.isBinaryOp()) {
+                        Object idVal = esTemplate.getValFromData(mapping,
+                            data,
+                            idFieldItem.getFieldName(),
+                            idFieldItem.getColumn().getColumnName());
+
+                        if (logger.isTraceEnabled()) {
+                            logger.trace("Main table delete es index, destination:{}, table: {}, index: {}, id: {}",
+                                config.getDestination(),
+                                dml.getTable(),
+                                mapping.get_index(),
+                                idVal);
+                        }
+                        esTemplate.delete(mapping, idVal, null);
+                    } else {
+                        // ------主键带函数, 查询sql获取主键删除------
+                        // FIXME 删除时反查sql为空记录, 无法获取 id field 值
+                        mainTableDelete(config, dml, data);
                     }
                 } else {
-                    // ------主键带函数, 查询sql获取主键删除------
-                    mainTableDelete(config, dml, data);
+                    FieldItem pkFieldItem = schemaItem.getIdFieldItem(mapping);
+                    if (!pkFieldItem.isMethod() && !pkFieldItem.isBinaryOp()) {
+                        Map<String, Object> esFieldData = new LinkedHashMap<>();
+                        Object pkVal = esTemplate.getESDataFromDmlData(mapping, data, esFieldData);
+
+                        if (logger.isTraceEnabled()) {
+                            logger.trace("Main table delete es index, destination:{}, table: {}, index: {}, pk: {}",
+                                config.getDestination(),
+                                dml.getTable(),
+                                mapping.get_index(),
+                                pkVal);
+                        }
+                        esFieldData.remove(pkFieldItem.getFieldName());
+                        esFieldData.keySet().forEach(key -> esFieldData.put(key, null));
+                        esTemplate.delete(mapping, pkVal, esFieldData);
+                    } else {
+                        // ------主键带函数, 查询sql获取主键删除------
+                        mainTableDelete(config, dml, data);
+                    }
                 }
+
             }
 
             // 从表的操作
@@ -417,14 +441,7 @@ public class ESSyncService {
                 mapping.get_index(),
                 idVal);
         }
-        boolean result = esTemplate.insert(mapping, idVal, esFieldData);
-        if (!result) {
-            logger.error("Single table insert to es index error, destination:{}, table: {}, index: {}, id: {}",
-                config.getDestination(),
-                dml.getTable(),
-                mapping.get_index(),
-                idVal);
-        }
+        esTemplate.insert(mapping, idVal, esFieldData);
     }
 
     /**
@@ -461,15 +478,7 @@ public class ESSyncService {
                             mapping.get_index(),
                             idVal);
                     }
-                    boolean result = esTemplate.insert(mapping, idVal, esFieldData);
-                    if (!result) {
-                        logger.error(
-                            "Main table insert to es index by query sql error, destination:{}, table: {}, index: {}, id: {}",
-                            config.getDestination(),
-                            dml.getTable(),
-                            mapping.get_index(),
-                            idVal);
-                    }
+                    esTemplate.insert(mapping, idVal, esFieldData);
                 }
             } catch (Exception e) {
                 throw new RuntimeException(e);
@@ -493,6 +502,15 @@ public class ESSyncService {
         }
         ESSyncUtil.sqlRS(ds, sql, rs -> {
             try {
+                Map<String, Object> esFieldData = null;
+                if (mapping.getPk() != null) {
+                    esFieldData = new LinkedHashMap<>();
+                    esTemplate.getESDataFromDmlData(mapping, data, esFieldData);
+                    esFieldData.remove(mapping.getPk());
+                    for (String key : esFieldData.keySet()) {
+                        esFieldData.put(key, null);
+                    }
+                }
                 while (rs.next()) {
                     Object idVal = esTemplate.getIdValFromRS(mapping, rs);
 
@@ -504,15 +522,7 @@ public class ESSyncService {
                             mapping.get_index(),
                             idVal);
                     }
-                    boolean result = esTemplate.delete(mapping, idVal);
-                    if (!result) {
-                        logger.error(
-                            "Main table delete to es index by query sql error, destination:{}, table: {}, index: {}, id: {}",
-                            config.getDestination(),
-                            dml.getTable(),
-                            mapping.get_index(),
-                            idVal);
-                    }
+                    esTemplate.delete(mapping, idVal, esFieldData);
                 }
             } catch (Exception e) {
                 throw new RuntimeException(e);
@@ -558,13 +568,7 @@ public class ESSyncService {
                 dml.getTable(),
                 mapping.get_index());
         }
-        boolean result = esTemplate.updateByQuery(config, paramsTmp, esFieldData);
-        if (!result) {
-            logger.error("Join table update es index by foreign key error, destination:{}, table: {}, index: {}",
-                config.getDestination(),
-                dml.getTable(),
-                mapping.get_index());
-        }
+        esTemplate.updateByQuery(config, paramsTmp, esFieldData);
     }
 
     /**
@@ -652,14 +656,7 @@ public class ESSyncService {
                             dml.getTable(),
                             mapping.get_index());
                     }
-                    boolean result = esTemplate.updateByQuery(config, paramsTmp, esFieldData);
-                    if (!result) {
-                        logger.error(
-                            "Join table update es index by query sql error, destination:{}, table: {}, index: {}",
-                            config.getDestination(),
-                            dml.getTable(),
-                            mapping.get_index());
-                    }
+                    esTemplate.updateByQuery(config, paramsTmp, esFieldData);
                 }
             } catch (Exception e) {
                 throw new RuntimeException(e);
@@ -761,14 +758,7 @@ public class ESSyncService {
                             dml.getTable(),
                             mapping.get_index());
                     }
-                    boolean result = esTemplate.updateByQuery(config, paramsTmp, esFieldData);
-                    if (!result) {
-                        logger.error(
-                            "Join table update es index by query whole sql error, destination:{}, table: {}, index: {}",
-                            config.getDestination(),
-                            dml.getTable(),
-                            mapping.get_index());
-                    }
+                    esTemplate.updateByQuery(config, paramsTmp, esFieldData);
                 }
             } catch (Exception e) {
                 throw new RuntimeException(e);
@@ -799,14 +789,7 @@ public class ESSyncService {
                 mapping.get_index(),
                 idVal);
         }
-        boolean result = esTemplate.update(mapping, idVal, esFieldData);
-        if (!result) {
-            logger.error("Main table update to es index error, destination:{}, table: {}, index: {}, id: {}",
-                config.getDestination(),
-                dml.getTable(),
-                mapping.get_index(),
-                idVal);
-        }
+        esTemplate.update(mapping, idVal, esFieldData);
     }
 
     /**
@@ -843,15 +826,7 @@ public class ESSyncService {
                             mapping.get_index(),
                             idVal);
                     }
-                    boolean result = esTemplate.update(mapping, idVal, esFieldData);
-                    if (!result) {
-                        logger.error(
-                            "Main table update to es index by query sql error, destination:{}, table: {}, index: {}, id: {}",
-                            config.getDestination(),
-                            dml.getTable(),
-                            mapping.get_index(),
-                            idVal);
-                    }
+                    esTemplate.update(mapping, idVal, esFieldData);
                 }
             } catch (Exception e) {
                 throw new RuntimeException(e);
@@ -859,4 +834,11 @@ public class ESSyncService {
             return 0;
         });
     }
+
+    /**
+     * 提交批次
+     */
+    public void commit() {
+        esTemplate.commit();
+    }
 }

+ 107 - 205
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESTemplate.java

@@ -2,14 +2,13 @@ package com.alibaba.otter.canal.client.adapter.es.support;
 
 import java.sql.ResultSet;
 import java.sql.SQLException;
-import java.util.*;
+import java.util.LinkedHashMap;
+import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.TimeUnit;
 
 import javax.sql.DataSource;
 
-import com.alibaba.fastjson.JSON;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
@@ -18,18 +17,11 @@ import org.elasticsearch.client.transport.TransportClient;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.index.query.BoolQueryBuilder;
-import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.index.reindex.BulkByScrollResponse;
-import org.elasticsearch.index.reindex.UpdateByQueryAction;
-import org.elasticsearch.index.reindex.UpdateByQueryRequestBuilder;
 import org.elasticsearch.rest.RestStatus;
-import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.SearchHit;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.springframework.util.CollectionUtils;
 
 import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
 import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
@@ -52,254 +44,135 @@ public class ESTemplate {
 
     private TransportClient     transportClient;
 
+    private BulkRequestBuilder  bulkRequestBuilder;
+
     public ESTemplate(TransportClient transportClient){
         this.transportClient = transportClient;
+        this.bulkRequestBuilder = transportClient.prepareBulk();
+    }
+
+    public BulkRequestBuilder getBulk() {
+        return bulkRequestBuilder;
     }
 
     /**
      * 插入数据
-     * 
-     * @param mapping
-     * @param pkVal
-     * @param esFieldData
-     * @return
+     *
+     * @param mapping 配置对象
+     * @param pkVal 主键值
+     * @param esFieldData 数据Map
      */
-    public boolean insert(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
-        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+    public void insert(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
         if (mapping.get_id() != null) {
-            bulkRequestBuilder
-                .add(transportClient.prepareIndex(mapping.get_index(), mapping.get_type(), pkVal.toString())
+            if (mapping.isUpsert()) {
+                getBulk().add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
+                    .setDoc(esFieldData)
+                    .setDocAsUpsert(true));
+            } else {
+                getBulk().add(transportClient.prepareIndex(mapping.get_index(), mapping.get_type(), pkVal.toString())
                     .setSource(esFieldData));
+            }
+            commitBulk();
         } else {
             SearchResponse response = transportClient.prepareSearch(mapping.get_index())
                 .setTypes(mapping.get_type())
                 .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
-                .setSize(MAX_BATCH_SIZE)
+                .setSize(10000)
                 .get();
             for (SearchHit hit : response.getHits()) {
-                bulkRequestBuilder
-                    .add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), hit.getId()));
+                getBulk().add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), hit.getId())
+                    .setDoc(esFieldData));
+                commitBulk();
             }
-            bulkRequestBuilder
-                .add(transportClient.prepareIndex(mapping.get_index(), mapping.get_type()).setSource(esFieldData));
         }
-        return commitBulkRequest(bulkRequestBuilder);
+
     }
 
     /**
      * 根据主键更新数据
-     * 
-     * @param mapping
-     * @param pkVal
-     * @param esFieldData
-     * @return
+     *
+     * @param mapping 配置对象
+     * @param pkVal 主键值
+     * @param esFieldData 数据Map
      */
-    public boolean update(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
-        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
-        append4Update(bulkRequestBuilder, mapping, pkVal, esFieldData);
-        return commitBulkRequest(bulkRequestBuilder);
-    }
-
-    public void append4Update(BulkRequestBuilder bulkRequestBuilder, ESMapping mapping, Object pkVal,
-                              Map<String, Object> esFieldData) {
-        if (mapping.get_id() != null) {
-            bulkRequestBuilder
-                .add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
-                    .setDoc(esFieldData));
-        } else {
-            SearchResponse response = transportClient.prepareSearch(mapping.get_index())
-                .setTypes(mapping.get_type())
-                .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
-                .setSize(MAX_BATCH_SIZE)
-                .get();
-            for (SearchHit hit : response.getHits()) {
-                bulkRequestBuilder
-                    .add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), hit.getId())
-                        .setDoc(esFieldData));
-            }
-        }
+    public void update(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
+        append4Update(mapping, pkVal, esFieldData);
+        commitBulk();
     }
 
     /**
      * update by query
      *
-     * @param config
-     * @param paramsTmp
-     * @param esFieldData
-     * @return
+     * @param config 配置对象
+     * @param paramsTmp sql查询条件
+     * @param esFieldData 数据Map
      */
-    public boolean updateByQuery(ESSyncConfig config, Map<String, Object> paramsTmp, Map<String, Object> esFieldData) {
+    public void updateByQuery(ESSyncConfig config, Map<String, Object> paramsTmp, Map<String, Object> esFieldData) {
         if (paramsTmp.isEmpty()) {
-            return false;
+            return;
         }
         ESMapping mapping = config.getEsMapping();
         BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();
         paramsTmp.forEach((fieldName, value) -> queryBuilder.must(QueryBuilders.termsQuery(fieldName, value)));
 
-        SearchResponse response = transportClient.prepareSearch(mapping.get_index())
-            .setTypes(mapping.get_type())
-            .setSize(0)
-            .setQuery(queryBuilder)
-            .get();
-        long count = response.getHits().getTotalHits();
-        // 如果更新量大于Max, 查询sql批量更新
-        if (count > MAX_BATCH_SIZE) {
-            BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
-
-            DataSource ds = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
-            // 查询sql更新
-            StringBuilder sql = new StringBuilder("SELECT * FROM (" + mapping.getSql() + ") _v WHERE ");
-            paramsTmp.forEach(
-                (fieldName, value) -> sql.append("_v.").append(fieldName).append("=").append(value).append(" AND "));
-            int len = sql.length();
-            sql.delete(len - 4, len);
-            ESSyncUtil.sqlRS(ds, sql.toString(), rs -> {
-                int exeCount = 1;
-                try {
-                    BulkRequestBuilder bulkRequestBuilderTmp = bulkRequestBuilder;
-                    while (rs.next()) {
-                        Object idVal = getIdValFromRS(mapping, rs);
-                        append4Update(bulkRequestBuilderTmp, mapping, idVal, esFieldData);
-
-                        if (exeCount % mapping.getCommitBatch() == 0 && bulkRequestBuilderTmp.numberOfActions() > 0) {
-                            commitBulkRequest(bulkRequestBuilderTmp);
-                            bulkRequestBuilderTmp = transportClient.prepareBulk();
-                        }
-                        exeCount++;
-                    }
-
-                    if (bulkRequestBuilder.numberOfActions() > 0) {
-                        commitBulkRequest(bulkRequestBuilderTmp);
-                    }
-                } catch (Exception e) {
-                    throw new RuntimeException(e);
-                }
-                return 0;
-            });
-            return true;
-        } else {
-            return updateByQuery(mapping, queryBuilder, esFieldData, 1);
-        }
-    }
-
-    private boolean updateByQuery(ESMapping mapping, QueryBuilder queryBuilder, Map<String, Object> esFieldData,
-                                  int counter) {
-        if (CollectionUtils.isEmpty(esFieldData)) {
-            return true;
-        }
-
-        StringBuilder sb = new StringBuilder();
-        esFieldData.forEach((key, value) -> {
-            if (value instanceof Map) {
-                HashMap mapValue = (HashMap) value;
-                if (mapValue.containsKey("lon") && mapValue.containsKey("lat") && mapValue.size() == 2) {
-                    sb.append("ctx._source")
-                        .append("['")
-                        .append(key)
-                        .append("']")
-                        .append(" = [")
-                        .append(mapValue.get("lon"))
-                        .append(", ")
-                        .append(mapValue.get("lat"))
-                        .append("];");
-                } else {
-                    sb.append("ctx._source").append("[\"").append(key).append("\"]").append(" = ");
-                    sb.append(JSON.toJSONString(value));
-                    sb.append(";");
+        // 查询sql批量更新
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+        StringBuilder sql = new StringBuilder("SELECT * FROM (" + mapping.getSql() + ") _v WHERE ");
+        paramsTmp.forEach(
+            (fieldName, value) -> sql.append("_v.").append(fieldName).append("=").append(value).append(" AND "));
+        int len = sql.length();
+        sql.delete(len - 4, len);
+        Integer syncCount = (Integer) ESSyncUtil.sqlRS(ds, sql.toString(), rs -> {
+            int count = 0;
+            try {
+                while (rs.next()) {
+                    Object idVal = getIdValFromRS(mapping, rs);
+                    append4Update(mapping, idVal, esFieldData);
+                    commitBulk();
+                    count++;
                 }
-            } else if (value instanceof List) {
-                sb.append("ctx._source").append("[\"").append(key).append("\"]").append(" = ");
-                sb.append(JSON.toJSONString(value));
-                sb.append(";");
-            } else if (value instanceof String) {
-                sb.append("ctx._source")
-                    .append("['")
-                    .append(key)
-                    .append("']")
-                    .append(" = '")
-                    .append(value)
-                    .append("';");
-            } else {
-                sb.append("ctx._source").append("['").append(key).append("']").append(" = ").append(value).append(";");
+            } catch (Exception e) {
+                throw new RuntimeException(e);
             }
+            return count;
         });
-        String scriptLine = sb.toString();
         if (logger.isTraceEnabled()) {
-            logger.trace(scriptLine);
+            logger.trace("Update ES by query effect {} records", syncCount);
         }
-
-        UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(transportClient);
-        updateByQuery.source(mapping.get_index())
-            .abortOnVersionConflict(false)
-            .filter(queryBuilder)
-            .script(new Script(ScriptType.INLINE, "painless", scriptLine, Collections.emptyMap()));
-
-        BulkByScrollResponse response = updateByQuery.get();
-        if (logger.isTraceEnabled()) {
-            logger.trace("updateByQuery response: {}", response.getStatus());
-        }
-        if (!CollectionUtils.isEmpty(response.getSearchFailures())) {
-            logger.error("script update_for_search has search error: " + response.getBulkFailures());
-            return false;
-        }
-
-        if (!CollectionUtils.isEmpty(response.getBulkFailures())) {
-            logger.error("script update_for_search has update error: " + response.getBulkFailures());
-            return false;
-        }
-
-        if (response.getStatus().getVersionConflicts() > 0) {
-            if (counter >= 3) {
-                logger.error("第 {} 次执行updateByQuery, 依旧存在分片版本冲突,不再继续重试。", counter);
-                return false;
-            }
-            logger.warn("本次updateByQuery存在分片版本冲突,准备重新执行...");
-            try {
-                TimeUnit.SECONDS.sleep(1);
-            } catch (InterruptedException e) {
-                // ignore
-            }
-            return updateByQuery(mapping, queryBuilder, esFieldData, ++counter);
-        }
-
-        return true;
     }
 
     /**
      * 通过主键删除数据
      *
-     * @param mapping
-     * @param pkVal
-     * @return
+     * @param mapping 配置对象
+     * @param pkVal 主键值
+     * @param esFieldData 数据Map
      */
-    public boolean delete(ESMapping mapping, Object pkVal) {
-        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+    public void delete(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
         if (mapping.get_id() != null) {
-            bulkRequestBuilder
-                .add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), pkVal.toString()));
+            getBulk().add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), pkVal.toString()));
+            commitBulk();
         } else {
             SearchResponse response = transportClient.prepareSearch(mapping.get_index())
                 .setTypes(mapping.get_type())
                 .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
-                .setSize(MAX_BATCH_SIZE)
+                .setSize(10000)
                 .get();
             for (SearchHit hit : response.getHits()) {
-                bulkRequestBuilder
-                    .add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), hit.getId()));
+                getBulk().add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), hit.getId())
+                    .setDoc(esFieldData));
+                commitBulk();
             }
         }
-        return commitBulkRequest(bulkRequestBuilder);
+
     }
 
     /**
-     * 批量提交
-     *
-     * @param bulkRequestBuilder
-     * @return
+     * 提交批次
      */
-    private static boolean commitBulkRequest(BulkRequestBuilder bulkRequestBuilder) {
-        if (bulkRequestBuilder.numberOfActions() > 0) {
-            BulkResponse response = bulkRequestBuilder.execute().actionGet();
+    public void commit() {
+        if (getBulk().numberOfActions() > 0) {
+            BulkResponse response = getBulk().execute().actionGet();
             if (response.hasFailures()) {
                 for (BulkItemResponse itemResponse : response.getItems()) {
                     if (!itemResponse.isFailed()) {
@@ -307,16 +180,45 @@ public class ESTemplate {
                     }
 
                     if (itemResponse.getFailure().getStatus() == RestStatus.NOT_FOUND) {
-                        logger.warn(itemResponse.getFailureMessage());
+                        logger.error(itemResponse.getFailureMessage());
                     } else {
-                        logger.error("ES sync commit error: {}", itemResponse.getFailureMessage());
+                        throw new RuntimeException("ES sync commit error" + itemResponse.getFailureMessage());
                     }
                 }
             }
+        }
+    }
 
-            return !response.hasFailures();
+    /**
+     * 如果大于批量数则提交批次
+     */
+    private void commitBulk() {
+        if (getBulk().numberOfActions() >= MAX_BATCH_SIZE) {
+            commit();
+        }
+    }
+
+    private void append4Update(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
+        if (mapping.get_id() != null) {
+            if (mapping.isUpsert()) {
+                getBulk().add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
+                    .setDoc(esFieldData)
+                    .setDocAsUpsert(true));
+            } else {
+                getBulk().add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
+                    .setDoc(esFieldData));
+            }
+        } else {
+            SearchResponse response = transportClient.prepareSearch(mapping.get_index())
+                .setTypes(mapping.get_type())
+                .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
+                .setSize(10000)
+                .get();
+            for (SearchHit hit : response.getHits()) {
+                getBulk().add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), hit.getId())
+                    .setDoc(esFieldData));
+            }
         }
-        return true;
     }
 
     public Object getValFromRS(ESMapping mapping, ResultSet resultSet, String fieldName,
@@ -461,7 +363,7 @@ public class ESTemplate {
                 resultIdVal = getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName);
             }
 
-            if (dmlOld.get(columnName) != null && !mapping.getSkips().contains(fieldItem.getFieldName())) {
+            if (dmlOld.containsKey(columnName) && !mapping.getSkips().contains(fieldItem.getFieldName())) {
                 esFieldData.put(fieldItem.getFieldName(),
                     getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName));
             }

+ 4 - 4
client-adapter/elasticsearch/src/main/resources/es/mytest_user.yml

@@ -1,15 +1,15 @@
 dataSourceKey: defaultDS
 destination: example
+groupId: g1
 esMapping:
   _index: mytest_user
   _type: _doc
   _id: _id
+  upsert: true
 #  pk: id
   sql: "select a.id as _id, a.name as _name, a.role_id as _role_id, b.role_name as _role_name,
-        a.c_time as _c_time, c.labels as _labels from user a
-        left join role b on b.id=a.role_id
-        left join (select user_id, group_concat(label order by id desc separator ';') as labels from label
-        group by user_id) c on c.user_id=a.id"
+        a.c_time as _c_time from user a
+        left join role b on b.id=a.role_id"
 #  objFields:
 #    _labels: array:;
   etlCondition: "where a.c_time>='{0}'"

+ 1 - 1
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/ConfigLoadTest.java

@@ -21,7 +21,7 @@ public class ConfigLoadTest {
 
     @Test
     public void testLoad() {
-        Map<String, ESSyncConfig> configMap = ESSyncConfigLoader.load();
+        Map<String, ESSyncConfig> configMap = ESSyncConfigLoader.load(null);
         ESSyncConfig config = configMap.get("mytest_user.yml");
         Assert.assertNotNull(config);
         Assert.assertEquals("defaultDS", config.getDataSourceKey());

+ 1 - 1
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/SqlParseTest.java

@@ -29,7 +29,7 @@ public class SqlParseTest {
         Assert.assertFalse(tableItem.isMain());
         Assert.assertTrue(tableItem.isSubQuery());
         // 通过字段名找 FieldItem
-        List<FieldItem> fieldItems = schemaItem.getColumnFields().get(tableItem.getAlias() + ".label".toLowerCase());
+        List<FieldItem> fieldItems = schemaItem.getColumnFields().get(tableItem.getAlias() + ".labels".toLowerCase());
         fieldItems.forEach(
             fieldItem -> Assert.assertEquals("c.labels", fieldItem.getOwner() + "." + fieldItem.getFieldName()));
 

+ 1 - 1
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/Common.java

@@ -26,7 +26,7 @@ public class Common {
         outerAdapterConfig.setProperties(properties);
 
         ESAdapter esAdapter = new ESAdapter();
-        esAdapter.init(outerAdapterConfig);
+        esAdapter.init(outerAdapterConfig, null);
         return esAdapter;
     }
 

+ 0 - 6
client-adapter/hbase/pom.xml

@@ -17,12 +17,6 @@
             <version>${project.version}</version>
             <scope>provided</scope>
         </dependency>
-        <dependency>
-            <groupId>org.yaml</groupId>
-            <artifactId>snakeyaml</artifactId>
-            <version>1.19</version>
-            <scope>provided</scope>
-        </dependency>
         <dependency>
             <groupId>org.apache.hbase</groupId>
             <artifactId>hbase-client</artifactId>

+ 39 - 12
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/HbaseAdapter.java

@@ -1,10 +1,7 @@
 package com.alibaba.otter.canal.client.adapter.hbase;
 
 import java.io.IOException;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 
 import javax.sql.DataSource;
@@ -49,6 +46,8 @@ public class HbaseAdapter implements OuterAdapter {
 
     private HbaseConfigMonitor                      configMonitor;
 
+    private Properties                              envProperties;
+
     public Map<String, MappingConfig> getHbaseMapping() {
         return hbaseMapping;
     }
@@ -58,9 +57,10 @@ public class HbaseAdapter implements OuterAdapter {
     }
 
     @Override
-    public void init(OuterAdapterConfig configuration) {
+    public void init(OuterAdapterConfig configuration, Properties envProperties) {
         try {
-            Map<String, MappingConfig> hbaseMappingTmp = MappingConfigLoader.load();
+            this.envProperties = envProperties;
+            Map<String, MappingConfig> hbaseMappingTmp = MappingConfigLoader.load(envProperties);
             // 过滤不匹配的key的配置
             hbaseMappingTmp.forEach((key, mappingConfig) -> {
                 if ((mappingConfig.getOuterAdapterKey() == null && configuration.getKey() == null)
@@ -72,10 +72,19 @@ public class HbaseAdapter implements OuterAdapter {
             for (Map.Entry<String, MappingConfig> entry : hbaseMapping.entrySet()) {
                 String configName = entry.getKey();
                 MappingConfig mappingConfig = entry.getValue();
-                String k = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
-                           + mappingConfig.getHbaseMapping().getDatabase() + "."
-                           + mappingConfig.getHbaseMapping().getTable();
-                Map<String, MappingConfig> configMap = mappingConfigCache.computeIfAbsent(k, k1 -> new HashMap<>());
+                String k;
+                if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) {
+                    k = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-"
+                        + StringUtils.trimToEmpty(mappingConfig.getGroupId()) + "_"
+                        + mappingConfig.getHbaseMapping().getDatabase() + "-"
+                        + mappingConfig.getHbaseMapping().getTable();
+                } else {
+                    k = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_"
+                        + mappingConfig.getHbaseMapping().getDatabase() + "-"
+                        + mappingConfig.getHbaseMapping().getTable();
+                }
+                Map<String, MappingConfig> configMap = mappingConfigCache.computeIfAbsent(k,
+                    k1 -> new ConcurrentHashMap<>());
                 configMap.put(configName, mappingConfig);
             }
 
@@ -107,11 +116,29 @@ public class HbaseAdapter implements OuterAdapter {
             return;
         }
         String destination = StringUtils.trimToEmpty(dml.getDestination());
+        String groupId = StringUtils.trimToEmpty(dml.getGroupId());
         String database = dml.getDatabase();
         String table = dml.getTable();
-        Map<String, MappingConfig> configMap = mappingConfigCache.get(destination + "." + database + "." + table);
+        Map<String, MappingConfig> configMap;
+        if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) {
+            configMap = mappingConfigCache.get(destination + "-" + groupId + "_" + database + "-" + table);
+        } else {
+            configMap = mappingConfigCache.get(destination + "_" + database + "-" + table);
+        }
         if (configMap != null) {
-            configMap.values().forEach(config -> hbaseSyncService.sync(config, dml));
+            List<MappingConfig> configs = new ArrayList<>();
+            configMap.values().forEach(config -> {
+                if (StringUtils.isNotEmpty(config.getGroupId())) {
+                    if (config.getGroupId().equals(dml.getGroupId())) {
+                        configs.add(config);
+                    }
+                } else {
+                    configs.add(config);
+                }
+            });
+            if (!configs.isEmpty()) {
+                configs.forEach(config -> hbaseSyncService.sync(config, dml));
+            }
         }
     }
 

+ 10 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/config/MappingConfig.java

@@ -14,6 +14,8 @@ public class MappingConfig {
 
     private String       outerAdapterKey; // adapter key
 
+    private String       groupId;         // groupId
+
     private String       destination;     // canal实例或MQ的topic
 
     private HbaseMapping hbaseMapping;    // hbase映射配置
@@ -26,6 +28,14 @@ public class MappingConfig {
         this.dataSourceKey = dataSourceKey;
     }
 
+    public String getGroupId() {
+        return groupId;
+    }
+
+    public void setGroupId(String groupId) {
+        this.groupId = groupId;
+    }
+
     public String getOuterAdapterKey() {
         return outerAdapterKey;
     }

+ 8 - 7
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/config/MappingConfigLoader.java

@@ -2,12 +2,12 @@ package com.alibaba.otter.canal.client.adapter.hbase.config;
 
 import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.Properties;
 
-import com.alibaba.fastjson.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
 
+import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder;
 import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
 
 /**
@@ -25,17 +25,18 @@ public class MappingConfigLoader {
      *
      * @return 配置名/配置文件名--对象
      */
-    @SuppressWarnings("unchecked")
-    public static Map<String, MappingConfig> load() {
+    public static Map<String, MappingConfig> load(Properties envProperties) {
         logger.info("## Start loading hbase mapping config ... ");
 
         Map<String, MappingConfig> result = new LinkedHashMap<>();
 
         Map<String, String> configContentMap = MappingConfigsLoader.loadConfigs("hbase");
         configContentMap.forEach((fileName, content) -> {
-            Map configMap = new Yaml().loadAs(content, Map.class); // yml自带的对象反射不是很稳定
-            JSONObject configJson = new JSONObject(configMap);
-            MappingConfig config = configJson.toJavaObject(MappingConfig.class);
+            MappingConfig config = YmlConfigBinder
+                .bindYmlToObj(null, content, MappingConfig.class, null, envProperties);
+            if (config == null) {
+                return;
+            }
             try {
                 config.validate();
             } catch (Exception e) {

+ 397 - 392
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseEtlService.java

@@ -1,392 +1,397 @@
-package com.alibaba.otter.canal.client.adapter.hbase.service;
-
-import java.sql.*;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.function.Function;
-
-import javax.sql.DataSource;
-
-import com.alibaba.otter.canal.client.adapter.support.Util;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig;
-import com.alibaba.otter.canal.client.adapter.hbase.support.*;
-import com.alibaba.otter.canal.client.adapter.support.EtlResult;
-import com.alibaba.otter.canal.client.adapter.support.JdbcTypeUtil;
-import com.google.common.base.Joiner;
-
-/**
- * HBase ETL 操作业务类
- *
- * @author rewerma @ 2018-10-20
- * @version 1.0.0
- */
-public class HbaseEtlService {
-
-    private static Logger logger = LoggerFactory.getLogger(HbaseEtlService.class);
-
-
-    /**
-     * 建表
-     * 
-     * @param hbaseTemplate
-     * @param config
-     */
-    public static void createTable(HbaseTemplate hbaseTemplate, MappingConfig config) {
-        try {
-            // 判断hbase表是否存在,不存在则建表
-            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
-            if (!hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
-                hbaseTemplate.createTable(hbaseMapping.getHbaseTable(), hbaseMapping.getFamily());
-            }
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * 导入数据
-     * 
-     * @param ds 数据源
-     * @param hbaseTemplate hbaseTemplate
-     * @param config 配置
-     * @param params 筛选条件
-     * @return 导入结果
-     */
-    public static EtlResult importData(DataSource ds, HbaseTemplate hbaseTemplate, MappingConfig config,
-                                       List<String> params) {
-        EtlResult etlResult = new EtlResult();
-        AtomicLong successCount = new AtomicLong();
-        List<String> errMsg = new ArrayList<>();
-        String hbaseTable = "";
-        try {
-            if (config == null) {
-                logger.error("Config is null!");
-                etlResult.setSucceeded(false);
-                etlResult.setErrorMessage("Config is null!");
-                return etlResult;
-            }
-            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
-            hbaseTable = hbaseMapping.getHbaseTable();
-
-            long start = System.currentTimeMillis();
-
-            if (params != null && params.size() == 1 && "rebuild".equalsIgnoreCase(params.get(0))) {
-                logger.info(hbaseMapping.getHbaseTable() + " rebuild is starting!");
-                // 如果表存在则删除
-                if (hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
-                    hbaseTemplate.disableTable(hbaseMapping.getHbaseTable());
-                    hbaseTemplate.deleteTable(hbaseMapping.getHbaseTable());
-                }
-                params = null;
-            } else {
-                logger.info(hbaseMapping.getHbaseTable() + " etl is starting!");
-            }
-            createTable(hbaseTemplate, config);
-
-            // 拼接sql
-            String sql = "SELECT * FROM " + config.getHbaseMapping().getDatabase() + "." + hbaseMapping.getTable();
-
-            // 拼接条件
-            if (params != null && params.size() == 1 && hbaseMapping.getEtlCondition() == null) {
-                AtomicBoolean stExists = new AtomicBoolean(false);
-                // 验证是否有SYS_TIME字段
-                Util.sqlRS(ds, sql, rs -> {
-                    try {
-                        ResultSetMetaData rsmd = rs.getMetaData();
-                        int cnt = rsmd.getColumnCount();
-                        for (int i = 1; i <= cnt; i++) {
-                            String columnName = rsmd.getColumnName(i);
-                            if ("SYS_TIME".equalsIgnoreCase(columnName)) {
-                                stExists.set(true);
-                                break;
-                            }
-                        }
-                    } catch (Exception e) {
-                        // ignore
-                    }
-                    return null;
-                });
-                if (stExists.get()) {
-                    sql += " WHERE SYS_TIME >= '" + params.get(0) + "' ";
-                }
-            } else if (hbaseMapping.getEtlCondition() != null && params != null) {
-                String etlCondition = hbaseMapping.getEtlCondition();
-                int size = params.size();
-                for (int i = 0; i < size; i++) {
-                    etlCondition = etlCondition.replace("{" + i + "}", params.get(i));
-                }
-
-                sql += " " + etlCondition;
-            }
-
-            // 获取总数
-            String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
-            long cnt = (Long) Util.sqlRS(ds, countSql, rs -> {
-                Long count = null;
-                try {
-                    if (rs.next()) {
-                        count = ((Number) rs.getObject(1)).longValue();
-                    }
-                } catch (Exception e) {
-                    logger.error(e.getMessage(), e);
-                }
-                return count == null ? 0 : count;
-            });
-
-            // 当大于1万条记录时开启多线程
-            if (cnt >= 10000) {
-                int threadCount = 3;
-                long perThreadCnt = cnt / threadCount;
-                ExecutorService executor = Executors.newFixedThreadPool(threadCount);
-                List<Future<Boolean>> futures = new ArrayList<>(threadCount);
-                for (int i = 0; i < threadCount; i++) {
-                    long offset = i * perThreadCnt;
-                    Long size = null;
-                    if (i != threadCount - 1) {
-                        size = perThreadCnt;
-                    }
-                    String sqlFinal;
-                    if (size != null) {
-                        sqlFinal = sql + " LIMIT " + offset + "," + size;
-                    } else {
-                        sqlFinal = sql + " LIMIT " + offset + "," + cnt;
-                    }
-                    Future<Boolean> future = executor.submit(
-                        () -> executeSqlImport(ds, sqlFinal, hbaseMapping, hbaseTemplate, successCount, errMsg));
-                    futures.add(future);
-                }
-
-                for (Future<Boolean> future : futures) {
-                    future.get();
-                }
-
-                executor.shutdown();
-            } else {
-                executeSqlImport(ds, sql, hbaseMapping, hbaseTemplate, successCount, errMsg);
-            }
-
-            logger.info(hbaseMapping.getHbaseTable() + " etl completed in: "
-                        + (System.currentTimeMillis() - start) / 1000 + "s!");
-
-            etlResult.setResultMessage("导入HBase表 " + hbaseMapping.getHbaseTable() + " 数据:" + successCount.get() + " 条");
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            errMsg.add(hbaseTable + " etl failed! ==>" + e.getMessage());
-        }
-
-        if (errMsg.isEmpty()) {
-            etlResult.setSucceeded(true);
-        } else {
-            etlResult.setErrorMessage(Joiner.on("\n").join(errMsg));
-        }
-        return etlResult;
-    }
-
-    /**
-     * 执行导入
-     * 
-     * @param ds
-     * @param sql
-     * @param hbaseMapping
-     * @param hbaseTemplate
-     * @param successCount
-     * @param errMsg
-     * @return
-     */
-    private static boolean executeSqlImport(DataSource ds, String sql, MappingConfig.HbaseMapping hbaseMapping,
-                                            HbaseTemplate hbaseTemplate, AtomicLong successCount, List<String> errMsg) {
-        try {
-            Util.sqlRS(ds, sql, rs -> {
-                int i = 1;
-
-                try {
-                    boolean complete = false;
-                    List<HRow> rows = new ArrayList<>();
-                    String[] rowKeyColumns = null;
-                    if (hbaseMapping.getRowKey() != null) {
-                        rowKeyColumns = hbaseMapping.getRowKey().trim().split(",");
-                    }
-                    while (rs.next()) {
-                        int cc = rs.getMetaData().getColumnCount();
-                        int[] jdbcTypes = new int[cc];
-                        Class<?>[] classes = new Class[cc];
-                        for (int j = 1; j <= cc; j++) {
-                            int jdbcType = rs.getMetaData().getColumnType(j);
-                            jdbcTypes[j - 1] = jdbcType;
-                            classes[j - 1] = JdbcTypeUtil.jdbcType2javaType(jdbcType);
-                        }
-                        HRow row = new HRow();
-
-                        if (rowKeyColumns != null) {
-                            // 取rowKey字段拼接
-                            StringBuilder rowKeyVale = new StringBuilder();
-                            for (String rowKeyColumnName : rowKeyColumns) {
-                                Object obj = rs.getObject(rowKeyColumnName);
-                                if (obj != null) {
-                                    rowKeyVale.append(obj.toString());
-                                }
-                                rowKeyVale.append("|");
-                            }
-                            int len = rowKeyVale.length();
-                            if (len > 0) {
-                                rowKeyVale.delete(len - 1, len);
-                            }
-                            row.setRowKey(Bytes.toBytes(rowKeyVale.toString()));
-                        }
-
-                        for (int j = 1; j <= cc; j++) {
-                            String columnName = rs.getMetaData().getColumnName(j);
-
-                            Object val = JdbcTypeUtil.getRSData(rs, columnName, jdbcTypes[j - 1]);
-                            if (val == null) {
-                                continue;
-                            }
-
-                            MappingConfig.ColumnItem columnItem = hbaseMapping.getColumnItems().get(columnName);
-                            // 没有配置映射
-                            if (columnItem == null) {
-                                String family = hbaseMapping.getFamily();
-                                String qualifile = columnName;
-                                if (hbaseMapping.isUppercaseQualifier()) {
-                                    qualifile = qualifile.toUpperCase();
-                                }
-                                if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
-                                    if (hbaseMapping.getRowKey() == null && j == 1) {
-                                        row.setRowKey(Bytes.toBytes(val.toString()));
-                                    } else {
-                                        row.addCell(family, qualifile, Bytes.toBytes(val.toString()));
-                                    }
-                                } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
-                                    Type type = Type.getType(classes[j - 1]);
-                                    if (hbaseMapping.getRowKey() == null && j == 1) {
-                                        row.setRowKey(TypeUtil.toBytes(val, type));
-                                    } else {
-                                        row.addCell(family, qualifile, TypeUtil.toBytes(val, type));
-                                    }
-                                } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
-                                    PhType phType = PhType.getType(classes[j - 1]);
-                                    if (hbaseMapping.getRowKey() == null && j == 1) {
-                                        row.setRowKey(PhTypeUtil.toBytes(val, phType));
-                                    } else {
-                                        row.addCell(family, qualifile, PhTypeUtil.toBytes(val, phType));
-                                    }
-                                }
-                            } else {
-                                // 如果不需要类型转换
-                                if (columnItem.getType() == null || "".equals(columnItem.getType())) {
-                                    if (val instanceof java.sql.Date) {
-                                        SimpleDateFormat dateFmt = new SimpleDateFormat("yyyy-MM-dd");
-                                        val = dateFmt.format((Date) val);
-                                    } else if (val instanceof Timestamp) {
-                                        SimpleDateFormat datetimeFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-                                        val = datetimeFmt.format((Date) val);
-                                    }
-
-                                    byte[] valBytes = Bytes.toBytes(val.toString());
-                                    if (columnItem.isRowKey()) {
-                                        if (columnItem.getRowKeyLen() != null) {
-                                            valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
-                                            row.setRowKey(valBytes);
-                                        } else {
-                                            row.setRowKey(valBytes);
-                                        }
-                                    } else {
-                                        row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
-                                    }
-                                } else {
-                                    if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
-                                        byte[] valBytes = Bytes.toBytes(val.toString());
-                                        if (columnItem.isRowKey()) {
-                                            if (columnItem.getRowKeyLen() != null) {
-                                                valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
-                                            }
-                                            row.setRowKey(valBytes);
-                                        } else {
-                                            row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
-                                        }
-                                    } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
-                                        Type type = Type.getType(columnItem.getType());
-                                        if (columnItem.isRowKey()) {
-                                            if (columnItem.getRowKeyLen() != null) {
-                                                String v = limitLenNum(columnItem.getRowKeyLen(), val);
-                                                row.setRowKey(Bytes.toBytes(v));
-                                            } else {
-                                                row.setRowKey(TypeUtil.toBytes(val, type));
-                                            }
-                                        } else {
-                                            row.addCell(columnItem.getFamily(),
-                                                columnItem.getQualifier(),
-                                                TypeUtil.toBytes(val, type));
-                                        }
-                                    } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
-                                        PhType phType = PhType.getType(columnItem.getType());
-                                        if (columnItem.isRowKey()) {
-                                            row.setRowKey(PhTypeUtil.toBytes(val, phType));
-                                        } else {
-                                            row.addCell(columnItem.getFamily(),
-                                                columnItem.getQualifier(),
-                                                PhTypeUtil.toBytes(val, phType));
-                                        }
-                                    }
-                                }
-                            }
-                        }
-
-                        if (row.getRowKey() == null) throw new RuntimeException("RowKey 值为空");
-
-                        rows.add(row);
-                        complete = false;
-                        if (i % hbaseMapping.getCommitBatch() == 0 && !rows.isEmpty()) {
-                            hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
-                            rows.clear();
-                            complete = true;
-                        }
-                        i++;
-                        successCount.incrementAndGet();
-                        if (logger.isDebugEnabled()) {
-                            logger.debug("successful import count:" + successCount.get());
-                        }
-                    }
-
-                    if (!complete && !rows.isEmpty()) {
-                        hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
-                    }
-
-                } catch (Exception e) {
-                    logger.error(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage(), e);
-                    errMsg.add(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage());
-                    // throw new RuntimeException(e);
-                }
-                return i;
-            });
-            return true;
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            return false;
-        }
-    }
-
-    private static String limitLenNum(int len, Object val) {
-        if (val == null) {
-            return null;
-        }
-        if (val instanceof Number) {
-            return String.format("%0" + len + "d", (Number) ((Number) val).longValue());
-        } else if (val instanceof String) {
-            return String.format("%0" + len + "d", Long.parseLong((String) val));
-        }
-        return null;
-    }
-}
+package com.alibaba.otter.canal.client.adapter.hbase.service;
+
+import java.sql.ResultSetMetaData;
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+
+import javax.sql.DataSource;
+
+import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.hbase.support.HRow;
+import com.alibaba.otter.canal.client.adapter.hbase.support.HbaseTemplate;
+import com.alibaba.otter.canal.client.adapter.hbase.support.PhType;
+import com.alibaba.otter.canal.client.adapter.hbase.support.PhTypeUtil;
+import com.alibaba.otter.canal.client.adapter.hbase.support.Type;
+import com.alibaba.otter.canal.client.adapter.hbase.support.TypeUtil;
+import com.alibaba.otter.canal.client.adapter.support.EtlResult;
+import com.alibaba.otter.canal.client.adapter.support.JdbcTypeUtil;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+import com.google.common.base.Joiner;
+
+/**
+ * HBase ETL 操作业务类
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+public class HbaseEtlService {
+
+    private static Logger logger = LoggerFactory.getLogger(HbaseEtlService.class);
+
+
+    /**
+     * 建表
+     * 
+     * @param hbaseTemplate
+     * @param config
+     */
+    public static void createTable(HbaseTemplate hbaseTemplate, MappingConfig config) {
+        try {
+            // 判断hbase表是否存在,不存在则建表
+            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
+            if (!hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
+                hbaseTemplate.createTable(hbaseMapping.getHbaseTable(), hbaseMapping.getFamily());
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * 导入数据
+     * 
+     * @param ds 数据源
+     * @param hbaseTemplate hbaseTemplate
+     * @param config 配置
+     * @param params 筛选条件
+     * @return 导入结果
+     */
+    public static EtlResult importData(DataSource ds, HbaseTemplate hbaseTemplate, MappingConfig config,
+                                       List<String> params) {
+        EtlResult etlResult = new EtlResult();
+        AtomicLong successCount = new AtomicLong();
+        List<String> errMsg = new ArrayList<>();
+        String hbaseTable = "";
+        try {
+            if (config == null) {
+                logger.error("Config is null!");
+                etlResult.setSucceeded(false);
+                etlResult.setErrorMessage("Config is null!");
+                return etlResult;
+            }
+            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
+            hbaseTable = hbaseMapping.getHbaseTable();
+
+            long start = System.currentTimeMillis();
+
+            if (params != null && params.size() == 1 && "rebuild".equalsIgnoreCase(params.get(0))) {
+                logger.info(hbaseMapping.getHbaseTable() + " rebuild is starting!");
+                // 如果表存在则删除
+                if (hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
+                    hbaseTemplate.disableTable(hbaseMapping.getHbaseTable());
+                    hbaseTemplate.deleteTable(hbaseMapping.getHbaseTable());
+                }
+                params = null;
+            } else {
+                logger.info(hbaseMapping.getHbaseTable() + " etl is starting!");
+            }
+            createTable(hbaseTemplate, config);
+
+            // 拼接sql
+            String sql = "SELECT * FROM " + config.getHbaseMapping().getDatabase() + "." + hbaseMapping.getTable();
+
+            // 拼接条件
+            if (params != null && params.size() == 1 && hbaseMapping.getEtlCondition() == null) {
+                AtomicBoolean stExists = new AtomicBoolean(false);
+                // 验证是否有SYS_TIME字段
+                Util.sqlRS(ds, sql, rs -> {
+                    try {
+                        ResultSetMetaData rsmd = rs.getMetaData();
+                        int cnt = rsmd.getColumnCount();
+                        for (int i = 1; i <= cnt; i++) {
+                            String columnName = rsmd.getColumnName(i);
+                            if ("SYS_TIME".equalsIgnoreCase(columnName)) {
+                                stExists.set(true);
+                                break;
+                            }
+                        }
+                    } catch (Exception e) {
+                        // ignore
+                    }
+                    return null;
+                });
+                if (stExists.get()) {
+                    sql += " WHERE SYS_TIME >= '" + params.get(0) + "' ";
+                }
+            } else if (hbaseMapping.getEtlCondition() != null && params != null) {
+                String etlCondition = hbaseMapping.getEtlCondition();
+                int size = params.size();
+                for (int i = 0; i < size; i++) {
+                    etlCondition = etlCondition.replace("{" + i + "}", params.get(i));
+                }
+
+                sql += " " + etlCondition;
+            }
+
+            // 获取总数
+            String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
+            long cnt = (Long) Util.sqlRS(ds, countSql, rs -> {
+                Long count = null;
+                try {
+                    if (rs.next()) {
+                        count = ((Number) rs.getObject(1)).longValue();
+                    }
+                } catch (Exception e) {
+                    logger.error(e.getMessage(), e);
+                }
+                return count == null ? 0 : count;
+            });
+
+            // 当大于1万条记录时开启多线程
+            if (cnt >= 10000) {
+                int threadCount = 3;
+                long perThreadCnt = cnt / threadCount;
+                ExecutorService executor = Executors.newFixedThreadPool(threadCount);
+                List<Future<Boolean>> futures = new ArrayList<>(threadCount);
+                for (int i = 0; i < threadCount; i++) {
+                    long offset = i * perThreadCnt;
+                    Long size = null;
+                    if (i != threadCount - 1) {
+                        size = perThreadCnt;
+                    }
+                    String sqlFinal;
+                    if (size != null) {
+                        sqlFinal = sql + " LIMIT " + offset + "," + size;
+                    } else {
+                        sqlFinal = sql + " LIMIT " + offset + "," + cnt;
+                    }
+                    Future<Boolean> future = executor.submit(
+                        () -> executeSqlImport(ds, sqlFinal, hbaseMapping, hbaseTemplate, successCount, errMsg));
+                    futures.add(future);
+                }
+
+                for (Future<Boolean> future : futures) {
+                    future.get();
+                }
+
+                executor.shutdown();
+            } else {
+                executeSqlImport(ds, sql, hbaseMapping, hbaseTemplate, successCount, errMsg);
+            }
+
+            logger.info(hbaseMapping.getHbaseTable() + " etl completed in: "
+                        + (System.currentTimeMillis() - start) / 1000 + "s!");
+
+            etlResult.setResultMessage("导入HBase表 " + hbaseMapping.getHbaseTable() + " 数据:" + successCount.get() + " 条");
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            errMsg.add(hbaseTable + " etl failed! ==>" + e.getMessage());
+        }
+
+        if (errMsg.isEmpty()) {
+            etlResult.setSucceeded(true);
+        } else {
+            etlResult.setErrorMessage(Joiner.on("\n").join(errMsg));
+        }
+        return etlResult;
+    }
+
+    /**
+     * 执行导入
+     * 
+     * @param ds
+     * @param sql
+     * @param hbaseMapping
+     * @param hbaseTemplate
+     * @param successCount
+     * @param errMsg
+     * @return
+     */
    /**
     * Run one extraction SQL and write every returned row into the HBase table.
     * Rows are buffered and flushed in batches of {@code hbaseMapping.getCommitBatch()};
     * a final partial batch is flushed after the result set is exhausted.
     * Per-row errors are collected into {@code errMsg} (the batch loop aborts on the
     * first exception but the method still returns true in that case — only a failure
     * of {@code Util.sqlRS} itself returns false).
     *
     * @param ds            source JDBC data source
     * @param sql           extraction SQL (possibly LIMIT-partitioned by the caller)
     * @param hbaseMapping  column/rowkey mapping and mode (STRING / NATIVE / PHOENIX)
     * @param hbaseTemplate HBase write template
     * @param successCount  shared counter of successfully converted rows
     * @param errMsg        shared sink for error messages
     * @return true when the statement executed, false when Util.sqlRS threw
     */
    private static boolean executeSqlImport(DataSource ds, String sql, MappingConfig.HbaseMapping hbaseMapping,
                                            HbaseTemplate hbaseTemplate, AtomicLong successCount, List<String> errMsg) {
        try {
            Util.sqlRS(ds, sql, rs -> {
                int i = 1;

                try {
                    boolean complete = false;
                    List<HRow> rows = new ArrayList<>();
                    String[] rowKeyColumns = null;
                    if (hbaseMapping.getRowKey() != null) {
                        // Composite row key: comma-separated source column names.
                        rowKeyColumns = hbaseMapping.getRowKey().trim().split(",");
                    }
                    while (rs.next()) {
                        // Capture JDBC types and their Java classes once per row.
                        int cc = rs.getMetaData().getColumnCount();
                        int[] jdbcTypes = new int[cc];
                        Class<?>[] classes = new Class[cc];
                        for (int j = 1; j <= cc; j++) {
                            int jdbcType = rs.getMetaData().getColumnType(j);
                            jdbcTypes[j - 1] = jdbcType;
                            classes[j - 1] = JdbcTypeUtil.jdbcType2javaType(jdbcType);
                        }
                        HRow row = new HRow();

                        if (rowKeyColumns != null) {
                            // Build the row key by joining the configured columns with '|'.
                            StringBuilder rowKeyVale = new StringBuilder();
                            for (String rowKeyColumnName : rowKeyColumns) {
                                Object obj = rs.getObject(rowKeyColumnName);
                                if (obj != null) {
                                    rowKeyVale.append(obj.toString());
                                }
                                rowKeyVale.append("|");
                            }
                            int len = rowKeyVale.length();
                            if (len > 0) {
                                // Drop the trailing separator.
                                rowKeyVale.delete(len - 1, len);
                            }
                            row.setRowKey(Bytes.toBytes(rowKeyVale.toString()));
                        }

                        for (int j = 1; j <= cc; j++) {
                            String columnName = rs.getMetaData().getColumnName(j);

                            Object val = JdbcTypeUtil.getRSData(rs, columnName, jdbcTypes[j - 1]);
                            if (val == null) {
                                continue;
                            }

                            MappingConfig.ColumnItem columnItem = hbaseMapping.getColumnItems().get(columnName);
                            // No explicit column mapping configured: derive family/qualifier.
                            if (columnItem == null) {
                                String family = hbaseMapping.getFamily();
                                String qualifile = columnName;
                                if (hbaseMapping.isUppercaseQualifier()) {
                                    qualifile = qualifile.toUpperCase();
                                }
                                // Without a configured row key, the first column becomes the key.
                                if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
                                    if (hbaseMapping.getRowKey() == null && j == 1) {
                                        row.setRowKey(Bytes.toBytes(val.toString()));
                                    } else {
                                        row.addCell(family, qualifile, Bytes.toBytes(val.toString()));
                                    }
                                } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
                                    Type type = Type.getType(classes[j - 1]);
                                    if (hbaseMapping.getRowKey() == null && j == 1) {
                                        row.setRowKey(TypeUtil.toBytes(val, type));
                                    } else {
                                        row.addCell(family, qualifile, TypeUtil.toBytes(val, type));
                                    }
                                } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
                                    PhType phType = PhType.getType(classes[j - 1]);
                                    if (hbaseMapping.getRowKey() == null && j == 1) {
                                        row.setRowKey(PhTypeUtil.toBytes(val, phType));
                                    } else {
                                        row.addCell(family, qualifile, PhTypeUtil.toBytes(val, phType));
                                    }
                                }
                            } else {
                                // Mapped column without an explicit type: store the string form.
                                if (columnItem.getType() == null || "".equals(columnItem.getType())) {
                                    // NOTE(review): a fresh SimpleDateFormat per value is safe but
                                    // wasteful; it is kept local because SimpleDateFormat is not
                                    // thread-safe and this lambda runs on multiple pool threads.
                                    if (val instanceof java.sql.Date) {
                                        SimpleDateFormat dateFmt = new SimpleDateFormat("yyyy-MM-dd");
                                        val = dateFmt.format((Date) val);
                                    } else if (val instanceof Timestamp) {
                                        SimpleDateFormat datetimeFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                                        val = datetimeFmt.format((Date) val);
                                    }

                                    byte[] valBytes = Bytes.toBytes(val.toString());
                                    if (columnItem.isRowKey()) {
                                        // Fixed-width numeric row key when rowKeyLen is configured.
                                        if (columnItem.getRowKeyLen() != null) {
                                            valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
                                            row.setRowKey(valBytes);
                                        } else {
                                            row.setRowKey(valBytes);
                                        }
                                    } else {
                                        row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
                                    }
                                } else {
                                    // Mapped column with an explicit target type: convert per mode.
                                    if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
                                        byte[] valBytes = Bytes.toBytes(val.toString());
                                        if (columnItem.isRowKey()) {
                                            if (columnItem.getRowKeyLen() != null) {
                                                valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
                                            }
                                            row.setRowKey(valBytes);
                                        } else {
                                            row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
                                        }
                                    } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
                                        Type type = Type.getType(columnItem.getType());
                                        if (columnItem.isRowKey()) {
                                            if (columnItem.getRowKeyLen() != null) {
                                                String v = limitLenNum(columnItem.getRowKeyLen(), val);
                                                row.setRowKey(Bytes.toBytes(v));
                                            } else {
                                                row.setRowKey(TypeUtil.toBytes(val, type));
                                            }
                                        } else {
                                            row.addCell(columnItem.getFamily(),
                                                columnItem.getQualifier(),
                                                TypeUtil.toBytes(val, type));
                                        }
                                    } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
                                        // NOTE(review): rowKeyLen is not honored in PHOENIX mode —
                                        // confirm whether that is intentional.
                                        PhType phType = PhType.getType(columnItem.getType());
                                        if (columnItem.isRowKey()) {
                                            row.setRowKey(PhTypeUtil.toBytes(val, phType));
                                        } else {
                                            row.addCell(columnItem.getFamily(),
                                                columnItem.getQualifier(),
                                                PhTypeUtil.toBytes(val, phType));
                                        }
                                    }
                                }
                            }
                        }

                        // A row without a key cannot be written to HBase.
                        if (row.getRowKey() == null) throw new RuntimeException("RowKey 值为空");

                        rows.add(row);
                        complete = false;
                        // Flush a full batch; 'complete' records that the buffer was just flushed.
                        if (i % hbaseMapping.getCommitBatch() == 0 && !rows.isEmpty()) {
                            hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
                            rows.clear();
                            complete = true;
                        }
                        i++;
                        successCount.incrementAndGet();
                        if (logger.isDebugEnabled()) {
                            logger.debug("successful import count:" + successCount.get());
                        }
                    }

                    // Flush the trailing partial batch.
                    if (!complete && !rows.isEmpty()) {
                        hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
                    }

                } catch (Exception e) {
                    logger.error(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage(), e);
                    errMsg.add(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage());
                    // throw new RuntimeException(e);
                }
                return i;
            });
            return true;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            return false;
        }
    }
+
+    private static String limitLenNum(int len, Object val) {
+        if (val == null) {
+            return null;
+        }
+        if (val instanceof Number) {
+            return String.format("%0" + len + "d", (Number) ((Number) val).longValue());
+        } else if (val instanceof String) {
+            return String.format("%0" + len + "d", Long.parseLong((String) val));
+        }
+        return null;
+    }
+}

+ 2 - 1
client-adapter/hbase/src/main/resources/hbase/mytest_person2.yml

@@ -1,5 +1,6 @@
 dataSourceKey: defaultDS
 destination: example
+groupId: g1
 hbaseMapping:
   mode: STRING  #NATIVE   #PHOENIX
   database: mytest  # 数据库名
@@ -57,4 +58,4 @@ hbaseMapping:
 # $UNSIGNED_TIMESTAMP       对应PHOENIX里的UNSIGNED_TIMESTAMP     12字节
 # $VARCHAR                  对应PHOENIX里的VARCHAR                动态长度
 # $VARBINARY                对应PHOENIX里的VARBINARY              动态长度
-# $DECIMAL                  对应PHOENIX里的DECIMAL                动态长度
+# $DECIMAL                  对应PHOENIX里的DECIMAL                动态长度

+ 3 - 2
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/AbstractCanalAdapterWorker.java

@@ -31,6 +31,7 @@ public abstract class AbstractCanalAdapterWorker {
     protected final Logger                    logger  = LoggerFactory.getLogger(this.getClass());
 
     protected String                          canalDestination;                                                // canal实例
+    protected String                          groupId = null;                                                  // groupId
     protected List<List<OuterAdapter>>        canalOuterAdapters;                                              // 外部适配器
     protected CanalClientConfig               canalClientConfig;                                               // 配置
     protected ExecutorService                 groupInnerExecutorService;                                       // 组内工作线程池
@@ -56,7 +57,7 @@ public abstract class AbstractCanalAdapterWorker {
                    // 组内适配器串行运行,尽量不要配置组内适配器
                     adapters.forEach(adapter -> {
                         long begin = System.currentTimeMillis();
-                        List<Dml> dmls = MessageUtil.parse4Dml(canalDestination, message);
+                        List<Dml> dmls = MessageUtil.parse4Dml(canalDestination, groupId, message);
                         if (dmls != null) {
                             batchSync(dmls, adapter);
 
@@ -101,7 +102,7 @@ public abstract class AbstractCanalAdapterWorker {
                    // 组内适配器串行运行,尽量不要配置组内适配器
                     outerAdapters.forEach(adapter -> {
                         long begin = System.currentTimeMillis();
-                        List<Dml> dmls = MessageUtil.flatMessage2Dml(canalDestination, flatMessages);
+                        List<Dml> dmls = MessageUtil.flatMessage2Dml(canalDestination, groupId, flatMessages);
                         batchSync(dmls, adapter);
 
                         if (logger.isDebugEnabled()) {

+ 5 - 2
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterKafkaWorker.java

@@ -28,7 +28,8 @@ public class CanalAdapterKafkaWorker extends AbstractCanalAdapterWorker {
         super(canalOuterAdapters);
         this.canalClientConfig = canalClientConfig;
         this.topic = topic;
-        this.canalDestination = topic;
+        super.canalDestination = topic;
+        super.groupId = groupId;
         this.flatMessage = flatMessage;
         this.connector = new KafkaCanalConnector(bootstrapServers,
             topic,
@@ -45,10 +46,12 @@ public class CanalAdapterKafkaWorker extends AbstractCanalAdapterWorker {
             try {
                 Thread.sleep(1000);
             } catch (InterruptedException e) {
+                // ignore
             }
         }
         ExecutorService workerExecutor = Executors.newSingleThreadExecutor();
-        int retry = canalClientConfig.getRetries() == null || canalClientConfig.getRetries() == 0 ? 1 : canalClientConfig.getRetries();
+        int retry = canalClientConfig.getRetries() == null
+                    || canalClientConfig.getRetries() == 0 ? 1 : canalClientConfig.getRetries();
         long timeout = canalClientConfig.getTimeout() == null ? 30000 : canalClientConfig.getTimeout(); // 默认超时30秒
 
         while (running) {

+ 216 - 198
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterLoader.java

@@ -1,198 +1,216 @@
-package com.alibaba.otter.canal.adapter.launcher.loader;
-
-import java.net.InetSocketAddress;
-import java.net.SocketAddress;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.otter.canal.client.adapter.OuterAdapter;
-import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
-import com.alibaba.otter.canal.client.adapter.support.ExtensionLoader;
-import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig;
-
-/**
- * 外部适配器的加载器
- *
- * @version 1.0.0
- */
-public class CanalAdapterLoader {
-
-    private static final Logger                     logger        = LoggerFactory.getLogger(CanalAdapterLoader.class);
-
-    private CanalClientConfig                       canalClientConfig;
-
-    private Map<String, CanalAdapterWorker>         canalWorkers  = new HashMap<>();
-
-    private Map<String, AbstractCanalAdapterWorker> canalMQWorker = new HashMap<>();
-
-    private ExtensionLoader<OuterAdapter>           loader;
-
-    public CanalAdapterLoader(CanalClientConfig canalClientConfig){
-        this.canalClientConfig = canalClientConfig;
-    }
-
-    /**
-     * 初始化canal-client
-     */
-    public void init() {
-        loader = ExtensionLoader.getExtensionLoader(OuterAdapter.class);
-
-        String canalServerHost = this.canalClientConfig.getCanalServerHost();
-        SocketAddress sa = null;
-        if (canalServerHost != null) {
-            String[] ipPort = canalServerHost.split(":");
-            sa = new InetSocketAddress(ipPort[0], Integer.parseInt(ipPort[1]));
-        }
-        String zkHosts = this.canalClientConfig.getZookeeperHosts();
-
-        if ("tcp".equalsIgnoreCase(canalClientConfig.getMode())) {
-            // 初始化canal-client的适配器
-            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
-                List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
-
-                for (CanalClientConfig.Group connectorGroup : canalAdapter.getGroups()) {
-                    List<OuterAdapter> canalOutConnectors = new ArrayList<>();
-                    for (OuterAdapterConfig c : connectorGroup.getOuterAdapters()) {
-                        loadConnector(c, canalOutConnectors);
-                    }
-                    canalOuterAdapterGroups.add(canalOutConnectors);
-                }
-                CanalAdapterWorker worker;
-                if (sa != null) {
-                    worker = new CanalAdapterWorker(canalClientConfig,
-                        canalAdapter.getInstance(),
-                        sa,
-                        canalOuterAdapterGroups);
-                } else if (zkHosts != null) {
-                    worker = new CanalAdapterWorker(canalClientConfig,
-                        canalAdapter.getInstance(),
-                        zkHosts,
-                        canalOuterAdapterGroups);
-                } else {
-                    throw new RuntimeException("No canal server connector found");
-                }
-                canalWorkers.put(canalAdapter.getInstance(), worker);
-                worker.start();
-                logger.info("Start adapter for canal instance: {} succeed", canalAdapter.getInstance());
-            }
-        } else if ("kafka".equalsIgnoreCase(canalClientConfig.getMode())) {
-            // 初始化canal-client-kafka的适配器
-            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
-                for (CanalClientConfig.Group group : canalAdapter.getGroups()) {
-                    List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
-                    List<OuterAdapter> canalOuterAdapters = new ArrayList<>();
-                    for (OuterAdapterConfig config : group.getOuterAdapters()) {
-                        loadConnector(config, canalOuterAdapters);
-                    }
-                    canalOuterAdapterGroups.add(canalOuterAdapters);
-
-                    CanalAdapterKafkaWorker canalKafkaWorker = new CanalAdapterKafkaWorker(canalClientConfig,
-                        canalClientConfig.getMqServers(),
-                        canalAdapter.getInstance(),
-                        group.getGroupId(),
-                        canalOuterAdapterGroups,
-                        canalClientConfig.getFlatMessage());
-                    canalMQWorker.put(canalAdapter.getInstance() + "-kafka-" + group.getGroupId(), canalKafkaWorker);
-                    canalKafkaWorker.start();
-                    logger.info("Start adapter for canal-client mq topic: {} succeed", canalAdapter.getInstance() + "-"
-                                                                                       + group.getGroupId());
-                }
-            }
-        } else if ("rocketMQ".equalsIgnoreCase(canalClientConfig.getMode())) {
-            // 初始化canal-client-rocketMQ的适配器
-            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
-                for (CanalClientConfig.Group group : canalAdapter.getGroups()) {
-                    List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
-                    List<OuterAdapter> canalOuterAdapters = new ArrayList<>();
-                    for (OuterAdapterConfig config : group.getOuterAdapters()) {
-                        loadConnector(config, canalOuterAdapters);
-                    }
-                    canalOuterAdapterGroups.add(canalOuterAdapters);
-                    CanalAdapterRocketMQWorker rocketMQWorker = new CanalAdapterRocketMQWorker(canalClientConfig,
-                        canalClientConfig.getMqServers(),
-                        canalAdapter.getInstance(),
-                        group.getGroupId(),
-                        canalOuterAdapterGroups,
-                        canalClientConfig.getAccessKey(),
-                        canalClientConfig.getSecretKey(),
-                        canalClientConfig.getFlatMessage());
-                    canalMQWorker.put(canalAdapter.getInstance() + "-rocketmq-" + group.getGroupId(), rocketMQWorker);
-                    rocketMQWorker.start();
-
-                    logger.info("Start adapter for canal-client mq topic: {} succeed", canalAdapter.getInstance() + "-"
-                                                                                       + group.getGroupId());
-                }
-            }
-        }
-    }
-
-    private void loadConnector(OuterAdapterConfig config, List<OuterAdapter> canalOutConnectors) {
-        try {
-            OuterAdapter adapter;
-            adapter = loader.getExtension(config.getName(), StringUtils.trimToEmpty(config.getKey()));
-
-            ClassLoader cl = Thread.currentThread().getContextClassLoader();
-            // 替换ClassLoader
-            Thread.currentThread().setContextClassLoader(adapter.getClass().getClassLoader());
-            adapter.init(config);
-            Thread.currentThread().setContextClassLoader(cl);
-            canalOutConnectors.add(adapter);
-            logger.info("Load canal adapter: {} succeed", config.getName());
-        } catch (Exception e) {
-            logger.error("Load canal adapter: {} failed", config.getName(), e);
-        }
-    }
-
-    /**
-     * 销毁所有适配器 为防止canal实例太多造成销毁阻塞, 并行销毁
-     */
-    public void destroy() {
-        if (!canalWorkers.isEmpty()) {
-            ExecutorService stopExecutorService = Executors.newFixedThreadPool(canalWorkers.size());
-            List<Future<Boolean>> futures = new ArrayList<>();
-            for (CanalAdapterWorker canalAdapterWorker : canalWorkers.values()) {
-                futures.add(stopExecutorService.submit(() -> {
-                    canalAdapterWorker.stop();
-                    return true;
-                }));
-            }
-            futures.forEach(future -> {
-                try {
-                    future.get();
-                } catch (Exception e) {
-                    // ignore
-                }
-            });
-            stopExecutorService.shutdown();
-        }
-
-        if (!canalMQWorker.isEmpty()) {
-            ExecutorService stopMQWorkerService = Executors.newFixedThreadPool(canalMQWorker.size());
-            List<Future<Boolean>> futures = new ArrayList<>();
-            for (AbstractCanalAdapterWorker canalAdapterMQWorker : canalMQWorker.values()) {
-                futures.add(stopMQWorkerService.submit(() -> {
-                    canalAdapterMQWorker.stop();
-                    return true;
-                }));
-            }
-            futures.forEach(future -> {
-                try {
-                    future.get();
-                } catch (Exception e) {
-                    // ignore
-                }
-            });
-            stopMQWorkerService.shutdown();
-        }
-        logger.info("All canal adapters destroyed");
-    }
-}
+package com.alibaba.otter.canal.adapter.launcher.loader;
+
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.util.*;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.core.env.EnumerablePropertySource;
+import org.springframework.core.env.Environment;
+import org.springframework.core.env.PropertySource;
+import org.springframework.core.env.StandardEnvironment;
+
+import com.alibaba.otter.canal.adapter.launcher.config.SpringContext;
+import com.alibaba.otter.canal.client.adapter.OuterAdapter;
+import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
+import com.alibaba.otter.canal.client.adapter.support.ExtensionLoader;
+import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig;
+
+/**
+ * 外部适配器的加载器
+ *
+ * @version 1.0.0
+ */
+public class CanalAdapterLoader {
+
+    private static final Logger                     logger        = LoggerFactory.getLogger(CanalAdapterLoader.class);
+
+    private CanalClientConfig                       canalClientConfig;
+
+    private Map<String, CanalAdapterWorker>         canalWorkers  = new HashMap<>();
+
+    private Map<String, AbstractCanalAdapterWorker> canalMQWorker = new HashMap<>();
+
+    private ExtensionLoader<OuterAdapter>           loader;
+
+    public CanalAdapterLoader(CanalClientConfig canalClientConfig){
+        this.canalClientConfig = canalClientConfig;
+    }
+
+    /**
+     * 初始化canal-client
+     */
+    public void init() {
+        loader = ExtensionLoader.getExtensionLoader(OuterAdapter.class);
+
+        String canalServerHost = this.canalClientConfig.getCanalServerHost();
+        SocketAddress sa = null;
+        if (canalServerHost != null) {
+            String[] ipPort = canalServerHost.split(":");
+            sa = new InetSocketAddress(ipPort[0], Integer.parseInt(ipPort[1]));
+        }
+        String zkHosts = this.canalClientConfig.getZookeeperHosts();
+
+        if ("tcp".equalsIgnoreCase(canalClientConfig.getMode())) {
+            // 初始化canal-client的适配器
+            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
+                List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
+
+                for (CanalClientConfig.Group connectorGroup : canalAdapter.getGroups()) {
+                    List<OuterAdapter> canalOutConnectors = new ArrayList<>();
+                    for (OuterAdapterConfig c : connectorGroup.getOuterAdapters()) {
+                        loadAdapter(c, canalOutConnectors);
+                    }
+                    canalOuterAdapterGroups.add(canalOutConnectors);
+                }
+                CanalAdapterWorker worker;
+                if (sa != null) {
+                    worker = new CanalAdapterWorker(canalClientConfig,
+                        canalAdapter.getInstance(),
+                        sa,
+                        canalOuterAdapterGroups);
+                } else if (zkHosts != null) {
+                    worker = new CanalAdapterWorker(canalClientConfig,
+                        canalAdapter.getInstance(),
+                        zkHosts,
+                        canalOuterAdapterGroups);
+                } else {
+                    throw new RuntimeException("No canal server connector found");
+                }
+                canalWorkers.put(canalAdapter.getInstance(), worker);
+                worker.start();
+                logger.info("Start adapter for canal instance: {} succeed", canalAdapter.getInstance());
+            }
+        } else if ("kafka".equalsIgnoreCase(canalClientConfig.getMode())) {
+            // 初始化canal-client-kafka的适配器
+            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
+                for (CanalClientConfig.Group group : canalAdapter.getGroups()) {
+                    List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
+                    List<OuterAdapter> canalOuterAdapters = new ArrayList<>();
+                    for (OuterAdapterConfig config : group.getOuterAdapters()) {
+                        loadAdapter(config, canalOuterAdapters);
+                    }
+                    canalOuterAdapterGroups.add(canalOuterAdapters);
+
+                    CanalAdapterKafkaWorker canalKafkaWorker = new CanalAdapterKafkaWorker(canalClientConfig,
+                        canalClientConfig.getMqServers(),
+                        canalAdapter.getInstance(),
+                        group.getGroupId(),
+                        canalOuterAdapterGroups,
+                        canalClientConfig.getFlatMessage());
+                    canalMQWorker.put(canalAdapter.getInstance() + "-kafka-" + group.getGroupId(), canalKafkaWorker);
+                    canalKafkaWorker.start();
+                    logger.info("Start adapter for canal-client mq topic: {} succeed",
+                        canalAdapter.getInstance() + "-" + group.getGroupId());
+                }
+            }
+        } else if ("rocketMQ".equalsIgnoreCase(canalClientConfig.getMode())) {
+            // 初始化canal-client-rocketMQ的适配器
+            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
+                for (CanalClientConfig.Group group : canalAdapter.getGroups()) {
+                    List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
+                    List<OuterAdapter> canalOuterAdapters = new ArrayList<>();
+                    for (OuterAdapterConfig config : group.getOuterAdapters()) {
+                        loadAdapter(config, canalOuterAdapters);
+                    }
+                    canalOuterAdapterGroups.add(canalOuterAdapters);
+                    CanalAdapterRocketMQWorker rocketMQWorker = new CanalAdapterRocketMQWorker(canalClientConfig,
+                        canalClientConfig.getMqServers(),
+                        canalAdapter.getInstance(),
+                        group.getGroupId(),
+                        canalOuterAdapterGroups,
+                        canalClientConfig.getAccessKey(),
+                        canalClientConfig.getSecretKey(),
+                        canalClientConfig.getFlatMessage());
+                    canalMQWorker.put(canalAdapter.getInstance() + "-rocketmq-" + group.getGroupId(), rocketMQWorker);
+                    rocketMQWorker.start();
+
+                    logger.info("Start adapter for canal-client mq topic: {} succeed",
+                        canalAdapter.getInstance() + "-" + group.getGroupId());
+                }
+            }
+        }
+    }
+
+    private void loadAdapter(OuterAdapterConfig config, List<OuterAdapter> canalOutConnectors) {
+        try {
+            OuterAdapter adapter;
+            adapter = loader.getExtension(config.getName(), StringUtils.trimToEmpty(config.getKey()));
+
+            ClassLoader cl = Thread.currentThread().getContextClassLoader();
+            // 替换ClassLoader
+            Thread.currentThread().setContextClassLoader(adapter.getClass().getClassLoader());
+            Environment env = (Environment) SpringContext.getBean(Environment.class);
+            Properties evnProperties = null;
+            if (env instanceof StandardEnvironment) {
+                evnProperties = new Properties();
+                for (PropertySource<?> propertySource : ((StandardEnvironment) env).getPropertySources()) {
+                    if (propertySource instanceof EnumerablePropertySource) {
+                        String[] names = ((EnumerablePropertySource<?>) propertySource).getPropertyNames();
+                        for (String name : names) {
+                            Object val = propertySource.getProperty(name);
+                            if (val != null) {
+                                evnProperties.put(name, val);
+                            }
+                        }
+                    }
+                }
+            }
+            adapter.init(config, evnProperties);
+            Thread.currentThread().setContextClassLoader(cl);
+            canalOutConnectors.add(adapter);
+            logger.info("Load canal adapter: {} succeed", config.getName());
+        } catch (Exception e) {
+            logger.error("Load canal adapter: {} failed", config.getName(), e);
+        }
+    }
+
+    /**
+     * 销毁所有适配器 为防止canal实例太多造成销毁阻塞, 并行销毁
+     */
+    public void destroy() {
+        if (!canalWorkers.isEmpty()) {
+            ExecutorService stopExecutorService = Executors.newFixedThreadPool(canalWorkers.size());
+            List<Future<Boolean>> futures = new ArrayList<>();
+            for (CanalAdapterWorker canalAdapterWorker : canalWorkers.values()) {
+                futures.add(stopExecutorService.submit(() -> {
+                    canalAdapterWorker.stop();
+                    return true;
+                }));
+            }
+            futures.forEach(future -> {
+                try {
+                    future.get();
+                } catch (Exception e) {
+                    // ignore
+                }
+            });
+            stopExecutorService.shutdown();
+        }
+
+        if (!canalMQWorker.isEmpty()) {
+            ExecutorService stopMQWorkerService = Executors.newFixedThreadPool(canalMQWorker.size());
+            List<Future<Boolean>> futures = new ArrayList<>();
+            for (AbstractCanalAdapterWorker canalAdapterMQWorker : canalMQWorker.values()) {
+                futures.add(stopMQWorkerService.submit(() -> {
+                    canalAdapterMQWorker.stop();
+                    return true;
+                }));
+            }
+            futures.forEach(future -> {
+                try {
+                    future.get();
+                } catch (Exception e) {
+                    // ignore
+                }
+            });
+            stopMQWorkerService.shutdown();
+        }
+        logger.info("All canal adapters destroyed");
+    }
+}

+ 2 - 1
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterRocketMQWorker.java

@@ -28,7 +28,8 @@ public class CanalAdapterRocketMQWorker extends AbstractCanalAdapterWorker {
         this.canalClientConfig = canalClientConfig;
         this.topic = topic;
         this.flatMessage = flatMessage;
-        this.canalDestination = topic;
+        super.canalDestination = topic;
+        super.groupId = groupId;
         this.connector = new RocketMQCanalConnector(nameServers,
             topic,
             groupId,

+ 246 - 323
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/remote/DbRemoteConfigLoader.java

@@ -1,323 +1,246 @@
-package com.alibaba.otter.canal.adapter.launcher.monitor.remote;
-
-import java.io.File;
-import java.io.FileWriter;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.druid.pool.DruidDataSource;
-import com.alibaba.otter.canal.common.utils.NamedThreadFactory;
-import com.google.common.base.Joiner;
-import com.google.common.collect.MapMaker;
-
-/**
- * 基于数据库的远程配置装载器
- *
- * @author rewerma 2019-01-25 下午05:20:16
- * @version 1.0.0
- */
-public class DbRemoteConfigLoader implements RemoteConfigLoader {
-
-    private static final Logger      logger                 = LoggerFactory.getLogger(DbRemoteConfigLoader.class);
-
-    private DruidDataSource          dataSource;
-
-    private static volatile long     currentConfigTimestamp = 0;
-    private Map<String, ConfigItem>  remoteAdapterConfigs   = new MapMaker().makeMap();
-
-    private ScheduledExecutorService executor               = Executors.newScheduledThreadPool(2,
-        new NamedThreadFactory("remote-adapter-config-scan"));
-
-    private RemoteAdapterMonitor     remoteAdapterMonitor   = new RemoteAdapterMonitorImpl();
-
-    public DbRemoteConfigLoader(String driverName, String jdbcUrl, String jdbcUsername, String jdbcPassword){
-        dataSource = new DruidDataSource();
-        if (StringUtils.isEmpty(driverName)) {
-            driverName = "com.mysql.jdbc.Driver";
-        }
-        dataSource.setDriverClassName(driverName);
-        dataSource.setUrl(jdbcUrl);
-        dataSource.setUsername(jdbcUsername);
-        dataSource.setPassword(jdbcPassword);
-        dataSource.setInitialSize(1);
-        dataSource.setMinIdle(1);
-        dataSource.setMaxActive(1);
-        dataSource.setMaxWait(60000);
-        dataSource.setTimeBetweenEvictionRunsMillis(60000);
-        dataSource.setMinEvictableIdleTimeMillis(300000);
-        try {
-            dataSource.init();
-        } catch (SQLException e) {
-            throw new RuntimeException(e.getMessage(), e);
-        }
-    }
-
-    /**
-     * 加载远程application.yml配置
-     */
-    @Override
-    public void loadRemoteConfig() {
-        try {
-            // 加载远程adapter配置
-            ConfigItem configItem = getRemoteAdapterConfig();
-            if (configItem != null) {
-                if (configItem.getModifiedTime() != currentConfigTimestamp) {
-                    currentConfigTimestamp = configItem.getModifiedTime();
-                    overrideLocalCanalConfig(configItem.getContent());
-                    logger.info("## Loaded remote adapter config: application.yml");
-                }
-            }
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    /**
-     * 获取远程application.yml配置
-     *
-     * @return 配置对象
-     */
-    private ConfigItem getRemoteAdapterConfig() {
-        String sql = "select name, content, modified_time from canal_config where id=2";
-        try (Connection conn = dataSource.getConnection();
-                Statement stmt = conn.createStatement();
-                ResultSet rs = stmt.executeQuery(sql)) {
-            if (rs.next()) {
-                ConfigItem configItem = new ConfigItem();
-                configItem.setId(2L);
-                configItem.setName(rs.getString("name"));
-                configItem.setContent(rs.getString("content"));
-                configItem.setModifiedTime(rs.getTimestamp("modified_time").getTime());
-                return configItem;
-            }
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-        return null;
-    }
-
-    /**
-     * 覆盖本地application.yml文件
-     *
-     * @param content 文件内容
-     */
-    private void overrideLocalCanalConfig(String content) {
-        try (FileWriter writer = new FileWriter(getConfPath() + "application.yml")) {
-            writer.write(content);
-            writer.flush();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    /**
-     * 加载adapter配置
-     */
-    @Override
-    public void loadRemoteAdapterConfigs() {
-        try {
-            // 加载远程adapter配置
-            loadModifiedAdapterConfigs();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    /**
-     * 加载有变动的adapter配置
-     */
-    @SuppressWarnings("unchecked")
-    private void loadModifiedAdapterConfigs() {
-        Map<String, ConfigItem>[] res = new Map[2];
-        Map<String, ConfigItem> remoteConfigStatus = new HashMap<>();
-        String sql = "select id, category, name, modified_time from canal_adapter_config";
-        try (Connection conn = dataSource.getConnection();
-                Statement stmt = conn.createStatement();
-                ResultSet rs = stmt.executeQuery(sql)) {
-            while (rs.next()) {
-                ConfigItem configItem = new ConfigItem();
-                configItem.setId(rs.getLong("id"));
-                configItem.setCategory(rs.getString("category"));
-                configItem.setName(rs.getString("name"));
-                configItem.setModifiedTime(rs.getTimestamp("modified_time").getTime());
-                remoteConfigStatus.put(configItem.getCategory() + "/" + configItem.getName(), configItem);
-            }
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-
-        if (!remoteConfigStatus.isEmpty()) {
-            List<Long> changedIds = new ArrayList<>();
-
-            for (ConfigItem remoteConfigStat : remoteConfigStatus.values()) {
-                ConfigItem currentConfig = remoteAdapterConfigs
-                    .get(remoteConfigStat.getCategory() + "/" + remoteConfigStat.getName());
-                if (currentConfig == null) {
-                    // 新增
-                    changedIds.add(remoteConfigStat.getId());
-                } else {
-                    // 修改
-                    if (currentConfig.getModifiedTime() != remoteConfigStat.getModifiedTime()) {
-                        changedIds.add(remoteConfigStat.getId());
-                    }
-                }
-            }
-            if (!changedIds.isEmpty()) {
-                String contentsSql = "select id, category, name, content, modified_time from canal_adapter_config  where id in ("
-                                     + Joiner.on(",").join(changedIds) + ")";
-                try (Connection conn = dataSource.getConnection();
-                        Statement stmt = conn.createStatement();
-                        ResultSet rs = stmt.executeQuery(contentsSql)) {
-                    while (rs.next()) {
-                        ConfigItem configItemNew = new ConfigItem();
-                        configItemNew.setId(rs.getLong("id"));
-                        configItemNew.setCategory(rs.getString("category"));
-                        configItemNew.setName(rs.getString("name"));
-                        configItemNew.setContent(rs.getString("content"));
-                        configItemNew.setModifiedTime(rs.getTimestamp("modified_time").getTime());
-
-                        remoteAdapterConfigs.put(configItemNew.getCategory() + "/" + configItemNew.getName(),
-                            configItemNew);
-                        remoteAdapterMonitor.onModify(configItemNew);
-                    }
-
-                } catch (Exception e) {
-                    logger.error(e.getMessage(), e);
-                }
-            }
-        }
-
-        for (ConfigItem configItem : remoteAdapterConfigs.values()) {
-            if (!remoteConfigStatus.containsKey(configItem.getCategory() + "/" + configItem.getName())) {
-                // 删除
-                remoteAdapterConfigs.remove(configItem.getCategory() + "/" + configItem.getName());
-                remoteAdapterMonitor.onDelete(configItem.getCategory() + "/" + configItem.getName());
-            }
-        }
-    }
-
-    private static boolean deleteDir(File dirFile) {
-        if (!dirFile.exists()) {
-            return false;
-        }
-
-        if (dirFile.isFile()) {
-            return dirFile.delete();
-        } else {
-            File[] files = dirFile.listFiles();
-            if (files == null || files.length == 0) {
-                return dirFile.delete();
-            }
-            for (File file : files) {
-                deleteDir(file);
-            }
-        }
-
-        return dirFile.delete();
-    }
-
-    /**
-     * 获取conf文件夹所在路径
-     *
-     * @return 路径地址
-     */
-    private String getConfPath() {
-        String classpath = this.getClass().getResource("/").getPath();
-        String confPath = classpath + "../conf/";
-        if (new File(confPath).exists()) {
-            return confPath;
-        } else {
-            return classpath;
-        }
-    }
-
-    /**
-     * 启动监听数据库变化
-     */
-    @Override
-    public void startMonitor() {
-        // 监听application.yml变化
-        executor.scheduleWithFixedDelay(() -> {
-            try {
-                loadRemoteConfig();
-            } catch (Throwable e) {
-                logger.error("scan remote application.yml failed", e);
-            }
-        }, 10, 3, TimeUnit.SECONDS);
-
-        // 监听adapter变化
-        executor.scheduleWithFixedDelay(() -> {
-            try {
-                loadRemoteAdapterConfigs();
-            } catch (Throwable e) {
-                logger.error("scan remote adapter configs failed", e);
-            }
-        }, 10, 3, TimeUnit.SECONDS);
-    }
-
-    /**
-     * 销毁
-     */
-    @Override
-    public void destroy() {
-        executor.shutdownNow();
-        try {
-            dataSource.close();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    private class RemoteAdapterMonitorImpl implements RemoteAdapterMonitor {
-
-        @Override
-        public void onAdd(ConfigItem configItem) {
-            this.onModify(configItem);
-        }
-
-        @Override
-        public void onModify(ConfigItem configItem) {
-            String confPath = getConfPath();
-            String category = configItem.getCategory();
-            File categoryDir = new File(confPath + category);
-            if (!categoryDir.isDirectory()) {
-                boolean mkDirs = categoryDir.mkdirs();
-                if (!mkDirs) {
-                    logger.info("## Create adapter category dir error: {}", category);
-                    return;
-                }
-            }
-            String name = configItem.getName();
-            try (FileWriter writer = new FileWriter(
-                    confPath + category + "/" + configItem.getName())) {
-                writer.write(configItem.getContent());
-                writer.flush();
-                logger.info("## Loaded remote adapter config: {}/{}", category, name);
-            } catch (Exception e) {
-                logger.error(e.getMessage(), e);
-            }
-        }
-
-        @Override
-        public void onDelete(String name) {
-            File file = new File(getConfPath() + name);
-            if (file.exists()) {
-                deleteDir(file);
-                logger.info("## Deleted and reloaded remote adapter config: {}", name);
-            }
-        }
-    }
-}
+package com.alibaba.otter.canal.adapter.launcher.monitor.remote;
+
+import java.io.FileWriter;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.otter.canal.common.utils.CommonUtils;
+import com.alibaba.otter.canal.common.utils.NamedThreadFactory;
+import com.google.common.base.Joiner;
+import com.google.common.collect.MapMaker;
+
+/**
+ * 基于数据库的远程配置装载器
+ *
+ * @author rewerma 2019-01-25 下午05:20:16
+ * @version 1.0.0
+ */
+public class DbRemoteConfigLoader implements RemoteConfigLoader {
+
+    private static final Logger      logger                 = LoggerFactory.getLogger(DbRemoteConfigLoader.class);
+
+    private DruidDataSource          dataSource;
+
+    private static volatile long     currentConfigTimestamp = 0;
+    private Map<String, ConfigItem>  remoteAdapterConfigs   = new MapMaker().makeMap();
+
+    private ScheduledExecutorService executor               = Executors.newScheduledThreadPool(2,
+        new NamedThreadFactory("remote-adapter-config-scan"));
+
+    private RemoteAdapterMonitor     remoteAdapterMonitor   = new RemoteAdapterMonitorImpl();
+
+    public DbRemoteConfigLoader(String driverName, String jdbcUrl, String jdbcUsername, String jdbcPassword){
+        dataSource = new DruidDataSource();
+        if (StringUtils.isEmpty(driverName)) {
+            driverName = "com.mysql.jdbc.Driver";
+        }
+        dataSource.setDriverClassName(driverName);
+        dataSource.setUrl(jdbcUrl);
+        dataSource.setUsername(jdbcUsername);
+        dataSource.setPassword(jdbcPassword);
+        dataSource.setInitialSize(1);
+        dataSource.setMinIdle(1);
+        dataSource.setMaxActive(1);
+        dataSource.setMaxWait(60000);
+        dataSource.setTimeBetweenEvictionRunsMillis(60000);
+        dataSource.setMinEvictableIdleTimeMillis(300000);
+        try {
+            dataSource.init();
+        } catch (SQLException e) {
+            throw new RuntimeException(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * 加载远程application.yml配置
+     */
+    @Override
+    public void loadRemoteConfig() {
+        try {
+            // 加载远程adapter配置
+            ConfigItem configItem = getRemoteAdapterConfig();
+            if (configItem != null) {
+                if (configItem.getModifiedTime() != currentConfigTimestamp) {
+                    currentConfigTimestamp = configItem.getModifiedTime();
+                    overrideLocalCanalConfig(configItem.getContent());
+                    logger.info("## Loaded remote adapter config: application.yml");
+                }
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * 获取远程application.yml配置
+     *
+     * @return 配置对象
+     */
+    private ConfigItem getRemoteAdapterConfig() {
+        String sql = "select name, content, modified_time from canal_config where id=2";
+        try (Connection conn = dataSource.getConnection();
+                Statement stmt = conn.createStatement();
+                ResultSet rs = stmt.executeQuery(sql)) {
+            if (rs.next()) {
+                ConfigItem configItem = new ConfigItem();
+                configItem.setId(2L);
+                configItem.setName(rs.getString("name"));
+                configItem.setContent(rs.getString("content"));
+                configItem.setModifiedTime(rs.getTimestamp("modified_time").getTime());
+                return configItem;
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+        return null;
+    }
+
+    /**
+     * 覆盖本地application.yml文件
+     *
+     * @param content 文件内容
+     */
+    private void overrideLocalCanalConfig(String content) {
+        try (FileWriter writer = new FileWriter(CommonUtils.getConfPath() + "application.yml")) {
+            writer.write(content);
+            writer.flush();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * 加载adapter配置
+     */
+    @Override
+    public void loadRemoteAdapterConfigs() {
+        try {
+            // 加载远程adapter配置
+            loadModifiedAdapterConfigs();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * 加载有变动的adapter配置
+     */
+    private void loadModifiedAdapterConfigs() {
+        Map<String, ConfigItem> remoteConfigStatus = new HashMap<>();
+        String sql = "select id, category, name, modified_time from canal_adapter_config";
+        try (Connection conn = dataSource.getConnection();
+                Statement stmt = conn.createStatement();
+                ResultSet rs = stmt.executeQuery(sql)) {
+            while (rs.next()) {
+                ConfigItem configItem = new ConfigItem();
+                configItem.setId(rs.getLong("id"));
+                configItem.setCategory(rs.getString("category"));
+                configItem.setName(rs.getString("name"));
+                configItem.setModifiedTime(rs.getTimestamp("modified_time").getTime());
+                remoteConfigStatus.put(configItem.getCategory() + "/" + configItem.getName(), configItem);
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+
+        if (!remoteConfigStatus.isEmpty()) {
+            List<Long> changedIds = new ArrayList<>();
+
+            for (ConfigItem remoteConfigStat : remoteConfigStatus.values()) {
+                ConfigItem currentConfig = remoteAdapterConfigs
+                    .get(remoteConfigStat.getCategory() + "/" + remoteConfigStat.getName());
+                if (currentConfig == null) {
+                    // 新增
+                    changedIds.add(remoteConfigStat.getId());
+                } else {
+                    // 修改
+                    if (currentConfig.getModifiedTime() != remoteConfigStat.getModifiedTime()) {
+                        changedIds.add(remoteConfigStat.getId());
+                    }
+                }
+            }
+            if (!changedIds.isEmpty()) {
+                String contentsSql = "select id, category, name, content, modified_time from canal_adapter_config  where id in ("
+                                     + Joiner.on(",").join(changedIds) + ")";
+                try (Connection conn = dataSource.getConnection();
+                        Statement stmt = conn.createStatement();
+                        ResultSet rs = stmt.executeQuery(contentsSql)) {
+                    while (rs.next()) {
+                        ConfigItem configItemNew = new ConfigItem();
+                        configItemNew.setId(rs.getLong("id"));
+                        configItemNew.setCategory(rs.getString("category"));
+                        configItemNew.setName(rs.getString("name"));
+                        configItemNew.setContent(rs.getString("content"));
+                        configItemNew.setModifiedTime(rs.getTimestamp("modified_time").getTime());
+
+                        remoteAdapterConfigs.put(configItemNew.getCategory() + "/" + configItemNew.getName(),
+                            configItemNew);
+                        remoteAdapterMonitor.onModify(configItemNew);
+                    }
+
+                } catch (Exception e) {
+                    logger.error(e.getMessage(), e);
+                }
+            }
+        }
+
+        for (ConfigItem configItem : remoteAdapterConfigs.values()) {
+            if (!remoteConfigStatus.containsKey(configItem.getCategory() + "/" + configItem.getName())) {
+                // 删除
+                remoteAdapterConfigs.remove(configItem.getCategory() + "/" + configItem.getName());
+                remoteAdapterMonitor.onDelete(configItem.getCategory() + "/" + configItem.getName());
+            }
+        }
+    }
+
+    /**
+     * 启动监听数据库变化
+     */
+    @Override
+    public void startMonitor() {
+        // 监听application.yml变化
+        executor.scheduleWithFixedDelay(() -> {
+            try {
+                loadRemoteConfig();
+            } catch (Throwable e) {
+                logger.error("scan remote application.yml failed", e);
+            }
+        }, 10, 3, TimeUnit.SECONDS);
+
+        // 监听adapter变化
+        executor.scheduleWithFixedDelay(() -> {
+            try {
+                loadRemoteAdapterConfigs();
+            } catch (Throwable e) {
+                logger.error("scan remote adapter configs failed", e);
+            }
+        }, 10, 3, TimeUnit.SECONDS);
+    }
+
+    /**
+     * 销毁
+     */
+    @Override
+    public void destroy() {
+        executor.shutdownNow();
+        try {
+            dataSource.close();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+}

+ 56 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/remote/RemoteAdapterMonitorImpl.java

@@ -0,0 +1,56 @@
+package com.alibaba.otter.canal.adapter.launcher.monitor.remote;
+
+import com.alibaba.otter.canal.common.utils.CommonUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileWriter;
+
+/**
+ * 远程配置监听器实现
+ *
+ * @author rewerma 2019-01-25 下午05:20:16
+ * @version 1.0.0
+ */
+public class RemoteAdapterMonitorImpl implements RemoteAdapterMonitor {
+
+    private static final Logger logger = LoggerFactory.getLogger(RemoteAdapterMonitorImpl.class);
+
+    @Override
+    public void onAdd(ConfigItem configItem) {
+        this.onModify(configItem);
+    }
+
+    @Override
+    public void onModify(ConfigItem configItem) {
+        String confPath = CommonUtils.getConfPath();
+        String category = configItem.getCategory();
+        File categoryDir = new File(confPath + category);
+        if (!categoryDir.isDirectory()) {
+            boolean mkDirs = categoryDir.mkdirs();
+            if (!mkDirs) {
+                logger.info("## Create adapter category dir error: {}", category);
+                return;
+            }
+        }
+        String name = configItem.getName();
+        try (FileWriter writer = new FileWriter(confPath + category + "/" + configItem.getName())) {
+            writer.write(configItem.getContent());
+            writer.flush();
+            logger.info("## Loaded remote adapter config: {}/{}", category, name);
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    @Override
+    public void onDelete(String name) {
+        File file = new File(CommonUtils.getConfPath() + name);
+        if (file.exists()) {
+            CommonUtils.deleteDir(file);
+            logger.info("## Deleted and reloaded remote adapter config: {}", name);
+        }
+    }
+
+}

+ 2 - 1
client-adapter/logger/src/main/java/com/alibaba/otter/canal/client/adapter/logger/LoggerAdapterExample.java

@@ -1,6 +1,7 @@
 package com.alibaba.otter.canal.client.adapter.logger;
 
 import java.util.List;
+import java.util.Properties;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -25,7 +26,7 @@ public class LoggerAdapterExample implements OuterAdapter {
     private Logger logger = LoggerFactory.getLogger(this.getClass());
 
     @Override
-    public void init(OuterAdapterConfig configuration) {
+    public void init(OuterAdapterConfig configuration, Properties envProperties) {
 
     }
 

+ 0 - 7
client-adapter/rdb/pom.xml

@@ -18,13 +18,6 @@
             <version>${project.version}</version>
             <scope>provided</scope>
         </dependency>
-        <dependency>
-            <groupId>org.yaml</groupId>
-            <artifactId>snakeyaml</artifactId>
-            <version>1.19</version>
-            <scope>provided</scope>
-        </dependency>
-
         <dependency>
             <groupId>mysql</groupId>
             <artifactId>mysql-connector-java</artifactId>

+ 17 - 15
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/RdbAdapter.java

@@ -5,10 +5,8 @@ import java.sql.SQLException;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
 
 import javax.sql.DataSource;
 
@@ -27,12 +25,7 @@ import com.alibaba.otter.canal.client.adapter.rdb.service.RdbEtlService;
 import com.alibaba.otter.canal.client.adapter.rdb.service.RdbMirrorDbSyncService;
 import com.alibaba.otter.canal.client.adapter.rdb.service.RdbSyncService;
 import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil;
-import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
-import com.alibaba.otter.canal.client.adapter.support.Dml;
-import com.alibaba.otter.canal.client.adapter.support.EtlResult;
-import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig;
-import com.alibaba.otter.canal.client.adapter.support.SPI;
-import com.alibaba.otter.canal.client.adapter.support.Util;
+import com.alibaba.otter.canal.client.adapter.support.*;
 
 /**
  * RDB适配器实现类
@@ -56,6 +49,8 @@ public class RdbAdapter implements OuterAdapter {
 
     private RdbConfigMonitor                        rdbConfigMonitor;
 
+    private Properties                              envProperties;
+
     public Map<String, MappingConfig> getRdbMapping() {
         return rdbMapping;
     }
@@ -74,8 +69,9 @@ public class RdbAdapter implements OuterAdapter {
      * @param configuration 外部适配器配置信息
      */
     @Override
-    public void init(OuterAdapterConfig configuration) {
-        Map<String, MappingConfig> rdbMappingTmp = ConfigLoader.load();
+    public void init(OuterAdapterConfig configuration, Properties envProperties) {
+        this.envProperties = envProperties;
+        Map<String, MappingConfig> rdbMappingTmp = ConfigLoader.load(envProperties);
         // 过滤不匹配的key的配置
         rdbMappingTmp.forEach((key, mappingConfig) -> {
             if ((mappingConfig.getOuterAdapterKey() == null && configuration.getKey() == null)
@@ -93,9 +89,15 @@ public class RdbAdapter implements OuterAdapter {
             String configName = entry.getKey();
             MappingConfig mappingConfig = entry.getValue();
             if (!mappingConfig.getDbMapping().getMirrorDb()) {
-                String key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
-                             + mappingConfig.getDbMapping().getDatabase() + "."
-                             + mappingConfig.getDbMapping().getTable();
+                String key;
+                if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) {
+                    key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-"
+                          + StringUtils.trimToEmpty(mappingConfig.getGroupId()) + "_"
+                          + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable();
+                } else {
+                    key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_"
+                          + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable();
+                }
                 Map<String, MappingConfig> configMap = mappingConfigCache.computeIfAbsent(key,
                     k1 -> new ConcurrentHashMap<>());
                 configMap.put(configName, mappingConfig);
@@ -158,7 +160,7 @@ public class RdbAdapter implements OuterAdapter {
             return;
         }
         try {
-            rdbSyncService.sync(mappingConfigCache, dmls);
+            rdbSyncService.sync(mappingConfigCache, dmls, envProperties);
             rdbMirrorDbSyncService.sync(dmls);
         } catch (Exception e) {
             throw new RuntimeException(e);

+ 51 - 50
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/ConfigLoader.java

@@ -1,50 +1,51 @@
-package com.alibaba.otter.canal.client.adapter.rdb.config;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import com.alibaba.fastjson.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
-
-import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
-
-/**
- * RDB表映射配置加载器
- *
- * @author rewerma 2018-11-07 下午02:41:34
- * @version 1.0.0
- */
-public class ConfigLoader {
-
-    private static Logger logger = LoggerFactory.getLogger(ConfigLoader.class);
-
-    /**
-     * 加载RDB表映射配置
-     *
-     * @return 配置名/配置文件名--对象
-     */
-    @SuppressWarnings("unchecked")
-    public static Map<String, MappingConfig> load() {
-        logger.info("## Start loading rdb mapping config ... ");
-
-        Map<String, MappingConfig> result = new LinkedHashMap<>();
-
-        Map<String, String> configContentMap = MappingConfigsLoader.loadConfigs("rdb");
-        configContentMap.forEach((fileName, content) -> {
-            Map configMap = new Yaml().loadAs(content, Map.class); // yml自带的对象反射不是很稳定
-            JSONObject configJson = new JSONObject(configMap);
-            MappingConfig config = configJson.toJavaObject(MappingConfig.class);
-            try {
-                config.validate();
-            } catch (Exception e) {
-                throw new RuntimeException("ERROR Config: " + fileName + " " + e.getMessage(), e);
-            }
-            result.put(fileName, config);
-        });
-
-        logger.info("## Rdb mapping config loaded");
-        return result;
-    }
-}
+package com.alibaba.otter.canal.client.adapter.rdb.config;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder;
+import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
+
+/**
+ * RDB表映射配置加载器
+ *
+ * @author rewerma 2018-11-07 下午02:41:34
+ * @version 1.0.0
+ */
+public class ConfigLoader {
+
+    private static Logger logger = LoggerFactory.getLogger(ConfigLoader.class);
+
+    /**
+     * 加载RDB表映射配置
+     *
+     * @return 配置名/配置文件名--对象
+     */
+    public static Map<String, MappingConfig> load(Properties envProperties) {
+        logger.info("## Start loading rdb mapping config ... ");
+
+        Map<String, MappingConfig> result = new LinkedHashMap<>();
+
+        Map<String, String> configContentMap = MappingConfigsLoader.loadConfigs("rdb");
+        configContentMap.forEach((fileName, content) -> {
+            MappingConfig config = YmlConfigBinder
+                .bindYmlToObj(null, content, MappingConfig.class, null, envProperties);
+            if (config == null) {
+                return;
+            }
+            try {
+                config.validate();
+            } catch (Exception e) {
+                throw new RuntimeException("ERROR Config: " + fileName + " " + e.getMessage(), e);
+            }
+            result.put(fileName, config);
+        });
+
+        logger.info("## Rdb mapping config loaded");
+        return result;
+    }
+}

+ 10 - 0
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/MappingConfig.java

@@ -17,6 +17,8 @@ public class MappingConfig {
 
     private String    destination;     // canal实例或MQ的topic
 
+    private String    groupId;         // groupId
+
     private String    outerAdapterKey; // 对应适配器的key
 
     private Boolean   concurrent;      // 是否并行同步
@@ -31,6 +33,14 @@ public class MappingConfig {
         this.dataSourceKey = dataSourceKey;
     }
 
+    public String getGroupId() {
+        return groupId;
+    }
+
+    public void setGroupId(String groupId) {
+        this.groupId = groupId;
+    }
+
     public String getOuterAdapterKey() {
         return outerAdapterKey;
     }

+ 1 - 1
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/MirrorDbConfig.java

@@ -7,7 +7,7 @@ public class MirrorDbConfig {
 
     private String             fileName;
     private MappingConfig      mappingConfig;
-    Map<String, MappingConfig> tableConfig = new ConcurrentHashMap<>();
+    private Map<String, MappingConfig> tableConfig = new ConcurrentHashMap<>();
 
     public static MirrorDbConfig create(String fileName, MappingConfig mappingConfig) {
         return new MirrorDbConfig(fileName, mappingConfig);

+ 169 - 170
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/monitor/RdbConfigMonitor.java

@@ -1,170 +1,169 @@
-package com.alibaba.otter.canal.client.adapter.rdb.monitor;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.Map;
-
-import com.alibaba.otter.canal.client.adapter.rdb.config.MirrorDbConfig;
-import org.apache.commons.io.filefilter.FileFilterUtils;
-import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
-import org.apache.commons.io.monitor.FileAlterationMonitor;
-import org.apache.commons.io.monitor.FileAlterationObserver;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
-
-import com.alibaba.otter.canal.client.adapter.rdb.RdbAdapter;
-import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
-import com.alibaba.otter.canal.client.adapter.rdb.config.MirrorDbConfig;
-import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
-import com.alibaba.otter.canal.client.adapter.support.Util;
-
-public class RdbConfigMonitor {
-
-    private static final Logger   logger      = LoggerFactory.getLogger(RdbConfigMonitor.class);
-
-    private static final String   adapterName = "rdb";
-
-    private String                key;
-
-    private RdbAdapter            rdbAdapter;
-
-    private FileAlterationMonitor fileMonitor;
-
-    public void init(String key, RdbAdapter rdbAdapter) {
-        this.key = key;
-        this.rdbAdapter = rdbAdapter;
-        File confDir = Util.getConfDirPath(adapterName);
-        try {
-            FileAlterationObserver observer = new FileAlterationObserver(confDir,
-                FileFilterUtils.and(FileFilterUtils.fileFileFilter(), FileFilterUtils.suffixFileFilter("yml")));
-            FileListener listener = new FileListener();
-            observer.addListener(listener);
-            fileMonitor = new FileAlterationMonitor(3000, observer);
-            fileMonitor.start();
-
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    public void destroy() {
-        try {
-            fileMonitor.stop();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    private class FileListener extends FileAlterationListenerAdaptor {
-
-        @Override
-        public void onFileCreate(File file) {
-            super.onFileCreate(file);
-            try {
-                // 加载新增的配置文件
-                String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName());
-                MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
-                config.validate();
-                if ((key == null && config.getOuterAdapterKey() == null)
-                    || (key != null && key.equals(config.getOuterAdapterKey()))) {
-                    addConfigToCache(file, config);
-
-                    logger.info("Add a new rdb mapping config: {} to canal adapter", file.getName());
-                }
-            } catch (Exception e) {
-                logger.error(e.getMessage(), e);
-            }
-        }
-
-        @Override
-        public void onFileChange(File file) {
-            super.onFileChange(file);
-
-            try {
-                if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
-                    // 加载配置文件
-                    String configContent = MappingConfigsLoader
-                        .loadConfig(adapterName + File.separator + file.getName());
-                    if (configContent == null) {
-                        onFileDelete(file);
-                        return;
-                    }
-                    MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
-                    config.validate();
-                    if ((key == null && config.getOuterAdapterKey() == null)
-                        || (key != null && key.equals(config.getOuterAdapterKey()))) {
-                        if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
-                            deleteConfigFromCache(file);
-                        }
-                        addConfigToCache(file, config);
-                    } else {
-                        // 不能修改outerAdapterKey
-                        throw new RuntimeException("Outer adapter key not allowed modify");
-                    }
-                    logger.info("Change a rdb mapping config: {} of canal adapter", file.getName());
-                }
-            } catch (Exception e) {
-                logger.error(e.getMessage(), e);
-            }
-        }
-
-        @Override
-        public void onFileDelete(File file) {
-            super.onFileDelete(file);
-
-            try {
-                if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
-                    deleteConfigFromCache(file);
-
-                    logger.info("Delete a rdb mapping config: {} of canal adapter", file.getName());
-                }
-            } catch (Exception e) {
-                logger.error(e.getMessage(), e);
-            }
-        }
-
-        private void addConfigToCache(File file, MappingConfig mappingConfig) {
-            if (mappingConfig == null || mappingConfig.getDbMapping() == null) {
-                return;
-            }
-            rdbAdapter.getRdbMapping().put(file.getName(), mappingConfig);
-            if (!mappingConfig.getDbMapping().getMirrorDb()) {
-                Map<String, MappingConfig> configMap = rdbAdapter.getMappingConfigCache()
-                    .computeIfAbsent(StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
-                                     + mappingConfig.getDbMapping().getDatabase() + "."
-                                     + mappingConfig.getDbMapping().getTable(),
-                        k1 -> new HashMap<>());
-                configMap.put(file.getName(), mappingConfig);
-            } else {
-                Map<String, MirrorDbConfig> mirrorDbConfigCache = rdbAdapter.getMirrorDbConfigCache();
-                mirrorDbConfigCache.put(StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
-                                        + mappingConfig.getDbMapping().getDatabase(),
-                    MirrorDbConfig.create(file.getName(), mappingConfig));
-            }
-        }
-
-        private void deleteConfigFromCache(File file) {
-            MappingConfig mappingConfig = rdbAdapter.getRdbMapping().remove(file.getName());
-
-            if (mappingConfig == null || mappingConfig.getDbMapping() == null) {
-                return;
-            }
-            if (!mappingConfig.getDbMapping().getMirrorDb()) {
-                for (Map<String, MappingConfig> configMap : rdbAdapter.getMappingConfigCache().values()) {
-                    if (configMap != null) {
-                        configMap.remove(file.getName());
-                    }
-                }
-            } else {
-                rdbAdapter.getMirrorDbConfigCache().forEach((key, mirrorDbConfig) -> {
-                    if (mirrorDbConfig.getFileName().equals(file.getName())) {
-                        rdbAdapter.getMirrorDbConfigCache().remove(key);
-                    }
-                });
-            }
-
-        }
-    }
-}
+package com.alibaba.otter.canal.client.adapter.rdb.monitor;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.io.filefilter.FileFilterUtils;
+import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
+import org.apache.commons.io.monitor.FileAlterationMonitor;
+import org.apache.commons.io.monitor.FileAlterationObserver;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import com.alibaba.otter.canal.client.adapter.rdb.RdbAdapter;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MirrorDbConfig;
+import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+
+public class RdbConfigMonitor {
+
+    private static final Logger   logger      = LoggerFactory.getLogger(RdbConfigMonitor.class);
+
+    private static final String   adapterName = "rdb";
+
+    private String                key;
+
+    private RdbAdapter            rdbAdapter;
+
+    private FileAlterationMonitor fileMonitor;
+
+    public void init(String key, RdbAdapter rdbAdapter) {
+        this.key = key;
+        this.rdbAdapter = rdbAdapter;
+        File confDir = Util.getConfDirPath(adapterName);
+        try {
+            FileAlterationObserver observer = new FileAlterationObserver(confDir,
+                FileFilterUtils.and(FileFilterUtils.fileFileFilter(), FileFilterUtils.suffixFileFilter("yml")));
+            FileListener listener = new FileListener();
+            observer.addListener(listener);
+            fileMonitor = new FileAlterationMonitor(3000, observer);
+            fileMonitor.start();
+
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    public void destroy() {
+        try {
+            fileMonitor.stop();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    private class FileListener extends FileAlterationListenerAdaptor {
+
+        @Override
+        public void onFileCreate(File file) {
+            super.onFileCreate(file);
+            try {
+                // 加载新增的配置文件
+                String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName());
+                MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
+                config.validate();
+                if ((key == null && config.getOuterAdapterKey() == null)
+                    || (key != null && key.equals(config.getOuterAdapterKey()))) {
+                    addConfigToCache(file, config);
+
+                    logger.info("Add a new rdb mapping config: {} to canal adapter", file.getName());
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        @Override
+        public void onFileChange(File file) {
+            super.onFileChange(file);
+
+            try {
+                if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
+                    // 加载配置文件
+                    String configContent = MappingConfigsLoader
+                        .loadConfig(adapterName + File.separator + file.getName());
+                    if (configContent == null) {
+                        onFileDelete(file);
+                        return;
+                    }
+                    MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
+                    config.validate();
+                    if ((key == null && config.getOuterAdapterKey() == null)
+                        || (key != null && key.equals(config.getOuterAdapterKey()))) {
+                        if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
+                            deleteConfigFromCache(file);
+                        }
+                        addConfigToCache(file, config);
+                    } else {
+                        // 不能修改outerAdapterKey
+                        throw new RuntimeException("Outer adapter key not allowed modify");
+                    }
+                    logger.info("Change a rdb mapping config: {} of canal adapter", file.getName());
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        @Override
+        public void onFileDelete(File file) {
+            super.onFileDelete(file);
+
+            try {
+                if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
+                    deleteConfigFromCache(file);
+
+                    logger.info("Delete a rdb mapping config: {} of canal adapter", file.getName());
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        private void addConfigToCache(File file, MappingConfig mappingConfig) {
+            if (mappingConfig == null || mappingConfig.getDbMapping() == null) {
+                return;
+            }
+            rdbAdapter.getRdbMapping().put(file.getName(), mappingConfig);
+            if (!mappingConfig.getDbMapping().getMirrorDb()) {
+                Map<String, MappingConfig> configMap = rdbAdapter.getMappingConfigCache()
+                    .computeIfAbsent(StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
+                                     + mappingConfig.getDbMapping().getDatabase() + "."
+                                     + mappingConfig.getDbMapping().getTable(),
+                        k1 -> new HashMap<>());
+                configMap.put(file.getName(), mappingConfig);
+            } else {
+                Map<String, MirrorDbConfig> mirrorDbConfigCache = rdbAdapter.getMirrorDbConfigCache();
+                mirrorDbConfigCache.put(StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
+                                        + mappingConfig.getDbMapping().getDatabase(),
+                    MirrorDbConfig.create(file.getName(), mappingConfig));
+            }
+        }
+
+        private void deleteConfigFromCache(File file) {
+            MappingConfig mappingConfig = rdbAdapter.getRdbMapping().remove(file.getName());
+
+            if (mappingConfig == null || mappingConfig.getDbMapping() == null) {
+                return;
+            }
+            if (!mappingConfig.getDbMapping().getMirrorDb()) {
+                for (Map<String, MappingConfig> configMap : rdbAdapter.getMappingConfigCache().values()) {
+                    if (configMap != null) {
+                        configMap.remove(file.getName());
+                    }
+                }
+            } else {
+                rdbAdapter.getMirrorDbConfigCache().forEach((key, mirrorDbConfig) -> {
+                    if (mirrorDbConfig.getFileName().equals(file.getName())) {
+                        rdbAdapter.getMirrorDbConfigCache().remove(key);
+                    }
+                });
+            }
+
+        }
+    }
+}

+ 101 - 91
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbEtlService.java

@@ -13,12 +13,12 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import javax.sql.DataSource;
 
-import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
 import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig.DbMapping;
+import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil;
 import com.alibaba.otter.canal.client.adapter.support.EtlResult;
 import com.alibaba.otter.canal.client.adapter.support.Util;
 import com.google.common.base.Joiner;
@@ -36,7 +36,8 @@ public class RdbEtlService {
     /**
      * 导入数据
      */
-    public static EtlResult importData(DataSource srcDS, DataSource targetDS, MappingConfig config, List<String> params) {
+    public static EtlResult importData(DataSource srcDS, DataSource targetDS, MappingConfig config,
+                                       List<String> params) {
         EtlResult etlResult = new EtlResult();
         AtomicLong successCount = new AtomicLong();
         List<String> errMsg = new ArrayList<>();
@@ -53,8 +54,8 @@ public class RdbEtlService {
             long start = System.currentTimeMillis();
 
             // 拼接sql
-            StringBuilder sql = new StringBuilder("SELECT * FROM " + dbMapping.getDatabase() + "."
-                                                  + dbMapping.getTable());
+            StringBuilder sql = new StringBuilder(
+                "SELECT * FROM " + dbMapping.getDatabase() + "." + dbMapping.getTable());
 
             // 拼接条件
             appendCondition(params, dbMapping, srcDS, sql);
@@ -91,12 +92,8 @@ public class RdbEtlService {
                     } else {
                         sqlFinal = sql + " LIMIT " + offset + "," + cnt;
                     }
-                    Future<Boolean> future = executor.submit(() -> executeSqlImport(srcDS,
-                        targetDS,
-                        sqlFinal,
-                        dbMapping,
-                        successCount,
-                        errMsg));
+                    Future<Boolean> future = executor
+                        .submit(() -> executeSqlImport(srcDS, targetDS, sqlFinal, dbMapping, successCount, errMsg));
                     futures.add(future);
                 }
 
@@ -109,11 +106,11 @@ public class RdbEtlService {
                 executeSqlImport(srcDS, targetDS, sql.toString(), dbMapping, successCount, errMsg);
             }
 
-            logger.info(dbMapping.getTable() + " etl completed in: " + (System.currentTimeMillis() - start) / 1000
-                        + "s!");
+            logger.info(
+                dbMapping.getTable() + " etl completed in: " + (System.currentTimeMillis() - start) / 1000 + "s!");
 
-            etlResult.setResultMessage("导入目标表 " + SyncUtil.getDbTableName(dbMapping) + " 数据:" + successCount.get()
-                                       + " 条");
+            etlResult
+                .setResultMessage("导入目标表 " + SyncUtil.getDbTableName(dbMapping) + " 数据:" + successCount.get() + " 条");
         } catch (Exception e) {
             logger.error(e.getMessage(), e);
             errMsg.add(hbaseTable + " etl failed! ==>" + e.getMessage());
@@ -127,8 +124,8 @@ public class RdbEtlService {
         return etlResult;
     }
 
-    private static void appendCondition(List<String> params, DbMapping dbMapping, DataSource ds, StringBuilder sql)
-                                                                                                                   throws SQLException {
+    private static void appendCondition(List<String> params, DbMapping dbMapping, DataSource ds,
+                                        StringBuilder sql) throws SQLException {
         if (params != null && params.size() == 1 && dbMapping.getEtlCondition() == null) {
             AtomicBoolean stExists = new AtomicBoolean(false);
             // 验证是否有SYS_TIME字段
@@ -145,9 +142,9 @@ public class RdbEtlService {
                     }
                 } catch (Exception e) {
                     // ignore
-            }
-            return null;
-        }   );
+                }
+                return null;
+            });
             if (stExists.get()) {
                 sql.append(" WHERE SYS_TIME >= '").append(params.get(0)).append("' ");
             }
@@ -168,13 +165,12 @@ public class RdbEtlService {
     private static boolean executeSqlImport(DataSource srcDS, DataSource targetDS, String sql, DbMapping dbMapping,
                                             AtomicLong successCount, List<String> errMsg) {
         try {
-            Util.sqlRS(srcDS, sql, rs -> {
-                int idx = 1;
+            Map<String, String> columnsMap = new LinkedHashMap<>();
+            Map<String, Integer> columnType = new LinkedHashMap<>();
 
+            Util.sqlRS(targetDS, "SELECT * FROM " + SyncUtil.getDbTableName(dbMapping) + " LIMIT 1 ", rs -> {
                 try {
-                    boolean completed = false;
 
-                    Map<String, Integer> columnType = new LinkedHashMap<>();
                     ResultSetMetaData rsd = rs.getMetaData();
                     int columnCount = rsd.getColumnCount();
                     List<String> columns = new ArrayList<>();
@@ -183,91 +179,105 @@ public class RdbEtlService {
                         columns.add(rsd.getColumnName(i));
                     }
 
-                    Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, columns);
+                    columnsMap.putAll(SyncUtil.getColumnsMap(dbMapping, columns));
+                    return true;
+                } catch (Exception e) {
+                    logger.error(e.getMessage(), e);
+                    return false;
+                }
+            });
+
+            Util.sqlRS(srcDS, sql, rs -> {
+                int idx = 1;
+
+                try {
+                    boolean completed = false;
+
                     // if (dbMapping.isMapAll()) {
                     // columnsMap = dbMapping.getAllColumns();
                     // } else {
                     // columnsMap = dbMapping.getTargetColumns();
                     // }
 
-                StringBuilder insertSql = new StringBuilder();
-                insertSql.append("INSERT INTO ").append(SyncUtil.getDbTableName(dbMapping)).append(" (");
-                columnsMap.forEach((targetColumnName, srcColumnName) -> insertSql.append(targetColumnName).append(","));
+                    StringBuilder insertSql = new StringBuilder();
+                    insertSql.append("INSERT INTO ").append(SyncUtil.getDbTableName(dbMapping)).append(" (");
+                    columnsMap
+                            .forEach((targetColumnName, srcColumnName) -> insertSql.append(targetColumnName).append(","));
 
-                int len = insertSql.length();
-                insertSql.delete(len - 1, len).append(") VALUES (");
-                int mapLen = columnsMap.size();
-                for (int i = 0; i < mapLen; i++) {
-                    insertSql.append("?,");
-                }
-                len = insertSql.length();
-                insertSql.delete(len - 1, len).append(")");
-                try (Connection connTarget = targetDS.getConnection();
-                        PreparedStatement pstmt = connTarget.prepareStatement(insertSql.toString())) {
-                    connTarget.setAutoCommit(false);
-
-                    while (rs.next()) {
-                        pstmt.clearParameters();
-
-                        // 删除数据
-                        Map<String, Object> values = new LinkedHashMap<>();
-                        StringBuilder deleteSql = new StringBuilder("DELETE FROM " + SyncUtil.getDbTableName(dbMapping)
-                                                                    + " WHERE ");
-                        appendCondition(dbMapping, deleteSql, values, rs);
-                        try (PreparedStatement pstmt2 = connTarget.prepareStatement(deleteSql.toString())) {
-                            int k = 1;
-                            for (Object val : values.values()) {
-                                pstmt2.setObject(k++, val);
+                    int len = insertSql.length();
+                    insertSql.delete(len - 1, len).append(") VALUES (");
+                    int mapLen = columnsMap.size();
+                    for (int i = 0; i < mapLen; i++) {
+                        insertSql.append("?,");
+                    }
+                    len = insertSql.length();
+                    insertSql.delete(len - 1, len).append(")");
+                    try (Connection connTarget = targetDS.getConnection();
+                         PreparedStatement pstmt = connTarget.prepareStatement(insertSql.toString())) {
+                        connTarget.setAutoCommit(false);
+
+                        while (rs.next()) {
+                            pstmt.clearParameters();
+
+                            // 删除数据
+                            Map<String, Object> values = new LinkedHashMap<>();
+                            StringBuilder deleteSql = new StringBuilder(
+                                    "DELETE FROM " + SyncUtil.getDbTableName(dbMapping) + " WHERE ");
+                            appendCondition(dbMapping, deleteSql, values, rs);
+                            try (PreparedStatement pstmt2 = connTarget.prepareStatement(deleteSql.toString())) {
+                                int k = 1;
+                                for (Object val : values.values()) {
+                                    pstmt2.setObject(k++, val);
+                                }
+                                pstmt2.execute();
                             }
-                            pstmt2.execute();
-                        }
 
-                        int i = 1;
-                        for (Map.Entry<String, String> entry : columnsMap.entrySet()) {
-                            String targetClolumnName = entry.getKey();
-                            String srcColumnName = entry.getValue();
-                            if (srcColumnName == null) {
-                                srcColumnName = targetClolumnName;
-                            }
+                            int i = 1;
+                            for (Map.Entry<String, String> entry : columnsMap.entrySet()) {
+                                String targetClolumnName = entry.getKey();
+                                String srcColumnName = entry.getValue();
+                                if (srcColumnName == null) {
+                                    srcColumnName = targetClolumnName;
+                                }
 
-                            Integer type = columnType.get(targetClolumnName.toLowerCase());
+                                Integer type = columnType.get(targetClolumnName.toLowerCase());
 
-                            Object value = rs.getObject(srcColumnName);
-                            if (value != null) {
-                                SyncUtil.setPStmt(type, pstmt, value, i);
-                            } else {
-                                pstmt.setNull(i, type);
+                                Object value = rs.getObject(srcColumnName);
+                                if (value != null) {
+                                    SyncUtil.setPStmt(type, pstmt, value, i);
+                                } else {
+                                    pstmt.setNull(i, type);
+                                }
+
+                                i++;
                             }
 
-                            i++;
-                        }
+                            pstmt.execute();
+                            if (logger.isTraceEnabled()) {
+                                logger.trace("Insert into target table, sql: {}", insertSql);
+                            }
 
-                        pstmt.execute();
-                        if (logger.isTraceEnabled()) {
-                            logger.trace("Insert into target table, sql: {}", insertSql);
+                            if (idx % dbMapping.getCommitBatch() == 0) {
+                                connTarget.commit();
+                                completed = true;
+                            }
+                            idx++;
+                            successCount.incrementAndGet();
+                            if (logger.isDebugEnabled()) {
+                                logger.debug("successful import count:" + successCount.get());
+                            }
                         }
-
-                        if (idx % dbMapping.getCommitBatch() == 0) {
+                        if (!completed) {
                             connTarget.commit();
-                            completed = true;
-                        }
-                        idx++;
-                        successCount.incrementAndGet();
-                        if (logger.isDebugEnabled()) {
-                            logger.debug("successful import count:" + successCount.get());
                         }
                     }
-                    if (!completed) {
-                        connTarget.commit();
-                    }
-                }
 
-            } catch (Exception e) {
-                logger.error(dbMapping.getTable() + " etl failed! ==>" + e.getMessage(), e);
-                errMsg.add(dbMapping.getTable() + " etl failed! ==>" + e.getMessage());
-            }
-            return idx;
-        }   );
+                } catch (Exception e) {
+                    logger.error(dbMapping.getTable() + " etl failed! ==>" + e.getMessage(), e);
+                    errMsg.add(dbMapping.getTable() + " etl failed! ==>" + e.getMessage());
+                }
+                return idx;
+            });
             return true;
         } catch (Exception e) {
             logger.error(e.getMessage(), e);
@@ -278,8 +288,8 @@ public class RdbEtlService {
     /**
      * 拼接目标表主键where条件
      */
-    private static void appendCondition(DbMapping dbMapping, StringBuilder sql, Map<String, Object> values, ResultSet rs)
-                                                                                                                         throws SQLException {
+    private static void appendCondition(DbMapping dbMapping, StringBuilder sql, Map<String, Object> values,
+                                        ResultSet rs) throws SQLException {
         // 拼接主键
         for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
             String targetColumnName = entry.getKey();

+ 37 - 29
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbMirrorDbSyncService.java

@@ -56,6 +56,15 @@ public class RdbMirrorDbSyncService {
             if (mirrorDbConfig == null) {
                 continue;
             }
+            if (mirrorDbConfig.getMappingConfig() == null) {
+                continue;
+            }
+            if (dml.getGroupId() != null && StringUtils.isNotEmpty(mirrorDbConfig.getMappingConfig().getGroupId())) {
+                if (!mirrorDbConfig.getMappingConfig().getGroupId().equals(dml.getGroupId())) {
+                    continue; // 如果groupId不匹配则过滤
+                }
+            }
+
             if (dml.getIsDdl() != null && dml.getIsDdl() && StringUtils.isNotEmpty(dml.getSql())) {
                 // DDL
                 if (logger.isDebugEnabled()) {
@@ -71,37 +80,35 @@ public class RdbMirrorDbSyncService {
             }
         }
         if (!dmlList.isEmpty()) {
-            rdbSyncService.sync(dmlList,
-                dml -> {
-                    MirrorDbConfig mirrorDbConfig = mirrorDbConfigCache.get(dml.getDestination() + "."
-                                                                            + dml.getDatabase());
-                    if (mirrorDbConfig == null) {
-                        return false;
-                    }
-                    String table = dml.getTable();
-                    MappingConfig config = mirrorDbConfig.getTableConfig().get(table);
+            rdbSyncService.sync(dmlList, dml -> {
+                MirrorDbConfig mirrorDbConfig = mirrorDbConfigCache.get(dml.getDestination() + "." + dml.getDatabase());
+                if (mirrorDbConfig == null) {
+                    return false;
+                }
+                String table = dml.getTable();
+                MappingConfig config = mirrorDbConfig.getTableConfig().get(table);
 
-                    if (config == null) {
-                        return false;
-                    }
+                if (config == null) {
+                    return false;
+                }
 
-                    if (config.getConcurrent()) {
-                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
-                        singleDmls.forEach(singleDml -> {
-                            int hash = rdbSyncService.pkHash(config.getDbMapping(), singleDml.getData());
-                            RdbSyncService.SyncItem syncItem = new RdbSyncService.SyncItem(config, singleDml);
-                            rdbSyncService.getDmlsPartition()[hash].add(syncItem);
-                        });
-                    } else {
-                        int hash = 0;
-                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
-                        singleDmls.forEach(singleDml -> {
-                            RdbSyncService.SyncItem syncItem = new RdbSyncService.SyncItem(config, singleDml);
-                            rdbSyncService.getDmlsPartition()[hash].add(syncItem);
-                        });
-                    }
-                    return true;
-                });
+                if (config.getConcurrent()) {
+                    List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
+                    singleDmls.forEach(singleDml -> {
+                        int hash = rdbSyncService.pkHash(config.getDbMapping(), singleDml.getData());
+                        RdbSyncService.SyncItem syncItem = new RdbSyncService.SyncItem(config, singleDml);
+                        rdbSyncService.getDmlsPartition()[hash].add(syncItem);
+                    });
+                } else {
+                    int hash = 0;
+                    List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
+                    singleDmls.forEach(singleDml -> {
+                        RdbSyncService.SyncItem syncItem = new RdbSyncService.SyncItem(config, singleDml);
+                        rdbSyncService.getDmlsPartition()[hash].add(syncItem);
+                    });
+                }
+                return true;
+            });
         }
     }
 
@@ -119,6 +126,7 @@ public class RdbMirrorDbSyncService {
             mappingConfig = new MappingConfig();
             mappingConfig.setDataSourceKey(baseConfigMap.getDataSourceKey());
             mappingConfig.setDestination(baseConfigMap.getDestination());
+            mappingConfig.setGroupId(baseConfigMap.getGroupId());
             mappingConfig.setOuterAdapterKey(baseConfigMap.getOuterAdapterKey());
             mappingConfig.setConcurrent(baseConfigMap.getConcurrent());
             MappingConfig.DbMapping dbMapping = new MappingConfig.DbMapping();

+ 506 - 494
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java

@@ -1,494 +1,506 @@
-package com.alibaba.otter.canal.client.adapter.rdb.service;
-
-import java.sql.Connection;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.*;
-import java.util.function.Function;
-
-import javax.sql.DataSource;
-
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.fastjson.JSON;
-import com.alibaba.fastjson.serializer.SerializerFeature;
-import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
-import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig.DbMapping;
-import com.alibaba.otter.canal.client.adapter.rdb.support.BatchExecutor;
-import com.alibaba.otter.canal.client.adapter.rdb.support.SingleDml;
-import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil;
-import com.alibaba.otter.canal.client.adapter.support.Dml;
-import com.alibaba.otter.canal.client.adapter.support.Util;
-
-/**
- * RDB同步操作业务
- *
- * @author rewerma 2018-11-7 下午06:45:49
- * @version 1.0.0
- */
-public class RdbSyncService {
-
-    private static final Logger               logger  = LoggerFactory.getLogger(RdbSyncService.class);
-
-    // 源库表字段类型缓存: instance.schema.table -> <columnName, jdbcType>
-    private Map<String, Map<String, Integer>> columnsTypeCache;
-
-    private int                               threads = 3;
-    private boolean                           skipDupException;
-
-    private List<SyncItem>[]                  dmlsPartition;
-    private BatchExecutor[]                   batchExecutors;
-    private ExecutorService[]                 executorThreads;
-
-    public List<SyncItem>[] getDmlsPartition() {
-        return dmlsPartition;
-    }
-
-    public Map<String, Map<String, Integer>> getColumnsTypeCache() {
-        return columnsTypeCache;
-    }
-
-    @SuppressWarnings("unchecked")
-    public RdbSyncService(DataSource dataSource, Integer threads, boolean skipDupException){
-        this(dataSource, threads, new ConcurrentHashMap<>(), skipDupException);
-    }
-
-    @SuppressWarnings("unchecked")
-    public RdbSyncService(DataSource dataSource, Integer threads, Map<String, Map<String, Integer>> columnsTypeCache,
-                          boolean skipDupException){
-        this.columnsTypeCache = columnsTypeCache;
-        this.skipDupException = skipDupException;
-        try {
-            if (threads != null) {
-                this.threads = threads;
-            }
-            this.dmlsPartition = new List[this.threads];
-            this.batchExecutors = new BatchExecutor[this.threads];
-            this.executorThreads = new ExecutorService[this.threads];
-            for (int i = 0; i < this.threads; i++) {
-                dmlsPartition[i] = new ArrayList<>();
-                batchExecutors[i] = new BatchExecutor(dataSource);
-                executorThreads[i] = Executors.newSingleThreadExecutor();
-            }
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * 批量同步回调
-     *
-     * @param dmls 批量 DML
-     * @param function 回调方法
-     */
-    public void sync(List<Dml> dmls, Function<Dml, Boolean> function) {
-        try {
-            boolean toExecute = false;
-            for (Dml dml : dmls) {
-                if (!toExecute) {
-                    toExecute = function.apply(dml);
-                } else {
-                    function.apply(dml);
-                }
-            }
-            if (toExecute) {
-                List<Future> futures = new ArrayList<>();
-                for (int i = 0; i < threads; i++) {
-                    int j = i;
-                    futures.add(executorThreads[i].submit(() -> {
-                        try {
-                            dmlsPartition[j]
-                                .forEach(syncItem -> sync(batchExecutors[j], syncItem.config, syncItem.singleDml));
-                            dmlsPartition[j].clear();
-                            batchExecutors[j].commit();
-                            return true;
-                        } catch (Throwable e) {
-                            batchExecutors[j].rollback();
-                            throw new RuntimeException(e);
-                        }
-                    }));
-                }
-
-                futures.forEach(future -> {
-                    try {
-                        future.get();
-                    } catch (ExecutionException | InterruptedException e) {
-                        throw new RuntimeException(e);
-                    }
-                });
-            }
-        } finally {
-            for (BatchExecutor batchExecutor : batchExecutors) {
-                if (batchExecutor != null) {
-                    batchExecutor.close();
-                }
-            }
-        }
-    }
-
-    /**
-     * 批量同步
-     *
-     * @param mappingConfig 配置集合
-     * @param dmls 批量 DML
-     */
-    public void sync(Map<String, Map<String, MappingConfig>> mappingConfig, List<Dml> dmls) {
-        sync(dmls, dml -> {
-            if (dml.getIsDdl() != null && dml.getIsDdl() && StringUtils.isNotEmpty(dml.getSql())) {
-                // DDL
-                columnsTypeCache.remove(dml.getDestination() + "." + dml.getDatabase() + "." + dml.getTable());
-                return false;
-            } else {
-                // DML
-                String destination = StringUtils.trimToEmpty(dml.getDestination());
-                String database = dml.getDatabase();
-                String table = dml.getTable();
-                Map<String, MappingConfig> configMap = mappingConfig.get(destination + "." + database + "." + table);
-
-                if (configMap == null) {
-                    return false;
-                }
-
-                boolean executed = false;
-                for (MappingConfig config : configMap.values()) {
-                    if (config.getConcurrent()) {
-                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
-                        singleDmls.forEach(singleDml -> {
-                            int hash = pkHash(config.getDbMapping(), singleDml.getData());
-                            SyncItem syncItem = new SyncItem(config, singleDml);
-                            dmlsPartition[hash].add(syncItem);
-                        });
-                    } else {
-                        int hash = 0;
-                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
-                        singleDmls.forEach(singleDml -> {
-                            SyncItem syncItem = new SyncItem(config, singleDml);
-                            dmlsPartition[hash].add(syncItem);
-                        });
-                    }
-                    executed = true;
-                }
-                return executed;
-            }
-        });
-    }
-
-    /**
-     * 单条 dml 同步
-     *
-     * @param batchExecutor 批量事务执行器
-     * @param config 对应配置对象
-     * @param dml DML
-     */
-    public void sync(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) {
-        if (config != null) {
-            try {
-                String type = dml.getType();
-                if (type != null && type.equalsIgnoreCase("INSERT")) {
-                    insert(batchExecutor, config, dml);
-                } else if (type != null && type.equalsIgnoreCase("UPDATE")) {
-                    update(batchExecutor, config, dml);
-                } else if (type != null && type.equalsIgnoreCase("DELETE")) {
-                    delete(batchExecutor, config, dml);
-                } else if (type != null && type.equalsIgnoreCase("TRUNCATE")) {
-                    truncate(batchExecutor, config);
-                }
-                if (logger.isDebugEnabled()) {
-                    logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue));
-                }
-            } catch (SQLException e) {
-                throw new RuntimeException(e);
-            }
-        }
-    }
-
-    /**
-     * 插入操作
-     *
-     * @param config 配置项
-     * @param dml DML数据
-     */
-    private void insert(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
-        Map<String, Object> data = dml.getData();
-        if (data == null || data.isEmpty()) {
-            return;
-        }
-
-        DbMapping dbMapping = config.getDbMapping();
-
-        Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
-
-        StringBuilder insertSql = new StringBuilder();
-        insertSql.append("INSERT INTO ").append(SyncUtil.getDbTableName(dbMapping)).append(" (");
-
-        columnsMap.forEach((targetColumnName, srcColumnName) -> insertSql.append(targetColumnName).append(","));
-        int len = insertSql.length();
-        insertSql.delete(len - 1, len).append(") VALUES (");
-        int mapLen = columnsMap.size();
-        for (int i = 0; i < mapLen; i++) {
-            insertSql.append("?,");
-        }
-        len = insertSql.length();
-        insertSql.delete(len - 1, len).append(")");
-
-        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
-
-        List<Map<String, ?>> values = new ArrayList<>();
-        for (Map.Entry<String, String> entry : columnsMap.entrySet()) {
-            String targetColumnName = entry.getKey();
-            String srcColumnName = entry.getValue();
-            if (srcColumnName == null) {
-                srcColumnName = Util.cleanColumn(targetColumnName);
-            }
-
-            Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
-            if (type == null) {
-                throw new RuntimeException("Target column: " + targetColumnName + " not matched");
-            }
-            Object value = data.get(srcColumnName);
-            BatchExecutor.setValue(values, type, value);
-        }
-
-        try {
-            batchExecutor.execute(insertSql.toString(), values);
-        } catch (SQLException e) {
-            if (skipDupException
-                && (e.getMessage().contains("Duplicate entry") || e.getMessage().startsWith("ORA-00001: 违反唯一约束条件"))) {
-                // ignore
-                // TODO 增加更多关系数据库的主键冲突的错误码
-            } else {
-                throw e;
-            }
-        }
-        if (logger.isTraceEnabled()) {
-            logger.trace("Insert into target table, sql: {}", insertSql);
-        }
-
-    }
-
-    /**
-     * 更新操作
-     *
-     * @param config 配置项
-     * @param dml DML数据
-     */
-    private void update(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
-        Map<String, Object> data = dml.getData();
-        if (data == null || data.isEmpty()) {
-            return;
-        }
-
-        Map<String, Object> old = dml.getOld();
-        if (old == null || old.isEmpty()) {
-            return;
-        }
-
-        DbMapping dbMapping = config.getDbMapping();
-
-        Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
-
-        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
-
-        StringBuilder updateSql = new StringBuilder();
-        updateSql.append("UPDATE ").append(SyncUtil.getDbTableName(dbMapping)).append(" SET ");
-        List<Map<String, ?>> values = new ArrayList<>();
-        boolean hasMatched = false;
-        for (String srcColumnName : old.keySet()) {
-            List<String> targetColumnNames = new ArrayList<>();
-            columnsMap.forEach((targetColumn, srcColumn) -> {
-                if (srcColumnName.toLowerCase().equals(srcColumn.toLowerCase())) {
-                    targetColumnNames.add(targetColumn);
-                }
-            });
-            if (!targetColumnNames.isEmpty()) {
-                hasMatched = true;
-                for (String targetColumnName : targetColumnNames) {
-                    updateSql.append(targetColumnName).append("=?, ");
-                    Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
-                    if (type == null) {
-                        throw new RuntimeException("Target column: " + targetColumnName + " not matched");
-                    }
-                    BatchExecutor.setValue(values, type, data.get(srcColumnName));
-                }
-            }
-        }
-        if (!hasMatched) {
-            logger.warn("Did not matched any columns to update ");
-            return;
-        }
-        int len = updateSql.length();
-        updateSql.delete(len - 2, len).append(" WHERE ");
-
-        // 拼接主键
-        appendCondition(dbMapping, updateSql, ctype, values, data, old);
-        batchExecutor.execute(updateSql.toString(), values);
-        if (logger.isTraceEnabled()) {
-            logger.trace("Update target table, sql: {}", updateSql);
-        }
-    }
-
-    /**
-     * 删除操作
-     *
-     * @param config
-     * @param dml
-     */
-    private void delete(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
-        Map<String, Object> data = dml.getData();
-        if (data == null || data.isEmpty()) {
-            return;
-        }
-
-        DbMapping dbMapping = config.getDbMapping();
-
-        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
-
-        StringBuilder sql = new StringBuilder();
-        sql.append("DELETE FROM ").append(SyncUtil.getDbTableName(dbMapping)).append(" WHERE ");
-
-        List<Map<String, ?>> values = new ArrayList<>();
-        // 拼接主键
-        appendCondition(dbMapping, sql, ctype, values, data);
-        batchExecutor.execute(sql.toString(), values);
-        if (logger.isTraceEnabled()) {
-            logger.trace("Delete from target table, sql: {}", sql);
-        }
-    }
-
-    /**
-     * truncate操作
-     *
-     * @param config
-     */
-    private void truncate(BatchExecutor batchExecutor, MappingConfig config) throws SQLException {
-        DbMapping dbMapping = config.getDbMapping();
-        StringBuilder sql = new StringBuilder();
-        sql.append("TRUNCATE TABLE ").append(SyncUtil.getDbTableName(dbMapping));
-        batchExecutor.execute(sql.toString(), new ArrayList<>());
-        if (logger.isTraceEnabled()) {
-            logger.trace("Truncate target table, sql: {}", sql);
-        }
-    }
-
-    /**
-     * 获取目标字段类型
-     *
-     * @param conn sql connection
-     * @param config 映射配置
-     * @return 字段sqlType
-     */
-    private Map<String, Integer> getTargetColumnType(Connection conn, MappingConfig config) {
-        DbMapping dbMapping = config.getDbMapping();
-        String cacheKey = config.getDestination() + "." + dbMapping.getDatabase() + "." + dbMapping.getTable();
-        Map<String, Integer> columnType = columnsTypeCache.get(cacheKey);
-        if (columnType == null) {
-            synchronized (RdbSyncService.class) {
-                columnType = columnsTypeCache.get(cacheKey);
-                if (columnType == null) {
-                    columnType = new LinkedHashMap<>();
-                    final Map<String, Integer> columnTypeTmp = columnType;
-                    String sql = "SELECT * FROM " + SyncUtil.getDbTableName(dbMapping) + " WHERE 1=2";
-                    Util.sqlRS(conn, sql, rs -> {
-                        try {
-                            ResultSetMetaData rsd = rs.getMetaData();
-                            int columnCount = rsd.getColumnCount();
-                            for (int i = 1; i <= columnCount; i++) {
-                                columnTypeTmp.put(rsd.getColumnName(i).toLowerCase(), rsd.getColumnType(i));
-                            }
-                            columnsTypeCache.put(cacheKey, columnTypeTmp);
-                        } catch (SQLException e) {
-                            logger.error(e.getMessage(), e);
-                        }
-                    });
-                }
-            }
-        }
-        return columnType;
-    }
-
-    /**
-     * 拼接主键 where条件
-     */
-    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
-                                 List<Map<String, ?>> values, Map<String, Object> d) {
-        appendCondition(dbMapping, sql, ctype, values, d, null);
-    }
-
-    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
-                                 List<Map<String, ?>> values, Map<String, Object> d, Map<String, Object> o) {
-        // 拼接主键
-        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
-            String targetColumnName = entry.getKey();
-            String srcColumnName = entry.getValue();
-            if (srcColumnName == null) {
-                srcColumnName = Util.cleanColumn(targetColumnName);
-            }
-            sql.append(targetColumnName).append("=? AND ");
-            Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
-            if (type == null) {
-                throw new RuntimeException("Target column: " + targetColumnName + " not matched");
-            }
-            // 如果有修改主键的情况
-            if (o != null && o.containsKey(srcColumnName)) {
-                BatchExecutor.setValue(values, type, o.get(srcColumnName));
-            } else {
-                BatchExecutor.setValue(values, type, d.get(srcColumnName));
-            }
-        }
-        int len = sql.length();
-        sql.delete(len - 4, len);
-    }
-
-    public static class SyncItem {
-
-        private MappingConfig config;
-        private SingleDml     singleDml;
-
-        public SyncItem(MappingConfig config, SingleDml singleDml){
-            this.config = config;
-            this.singleDml = singleDml;
-        }
-    }
-
-    /**
-     * 取主键hash
-     */
-    public int pkHash(DbMapping dbMapping, Map<String, Object> d) {
-        return pkHash(dbMapping, d, null);
-    }
-
-    public int pkHash(DbMapping dbMapping, Map<String, Object> d, Map<String, Object> o) {
-        int hash = 0;
-        // 取主键
-        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
-            String targetColumnName = entry.getKey();
-            String srcColumnName = entry.getValue();
-            if (srcColumnName == null) {
-                srcColumnName = Util.cleanColumn(targetColumnName);
-            }
-            Object value = null;
-            if (o != null && o.containsKey(srcColumnName)) {
-                value = o.get(srcColumnName);
-            } else if (d != null) {
-                value = d.get(srcColumnName);
-            }
-            if (value != null) {
-                hash += value.hashCode();
-            }
-        }
-        hash = Math.abs(hash) % threads;
-        return Math.abs(hash);
-    }
-
-    public void close() {
-        for (int i = 0; i < threads; i++) {
-            executorThreads[i].shutdown();
-        }
-    }
-}
+package com.alibaba.otter.canal.client.adapter.rdb.service;
+
+import java.sql.Connection;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.function.Function;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.serializer.SerializerFeature;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig.DbMapping;
+import com.alibaba.otter.canal.client.adapter.rdb.support.BatchExecutor;
+import com.alibaba.otter.canal.client.adapter.rdb.support.SingleDml;
+import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+
+/**
+ * RDB同步操作业务
+ *
+ * @author rewerma 2018-11-7 下午06:45:49
+ * @version 1.0.0
+ */
+public class RdbSyncService {
+
+    private static final Logger               logger  = LoggerFactory.getLogger(RdbSyncService.class);
+
+    // 源库表字段类型缓存: instance.schema.table -> <columnName, jdbcType>
+    private Map<String, Map<String, Integer>> columnsTypeCache;
+
+    private int                               threads = 3;
+    private boolean                           skipDupException;
+
+    private List<SyncItem>[]                  dmlsPartition;
+    private BatchExecutor[]                   batchExecutors;
+    private ExecutorService[]                 executorThreads;
+
+    public List<SyncItem>[] getDmlsPartition() {
+        return dmlsPartition;
+    }
+
+    public Map<String, Map<String, Integer>> getColumnsTypeCache() {
+        return columnsTypeCache;
+    }
+
+    public RdbSyncService(DataSource dataSource, Integer threads, boolean skipDupException){
+        this(dataSource, threads, new ConcurrentHashMap<>(), skipDupException);
+    }
+
+    @SuppressWarnings("unchecked")
+    public RdbSyncService(DataSource dataSource, Integer threads, Map<String, Map<String, Integer>> columnsTypeCache,
+                          boolean skipDupException){
+        this.columnsTypeCache = columnsTypeCache;
+        this.skipDupException = skipDupException;
+        try {
+            if (threads != null) {
+                this.threads = threads;
+            }
+            this.dmlsPartition = new List[this.threads];
+            this.batchExecutors = new BatchExecutor[this.threads];
+            this.executorThreads = new ExecutorService[this.threads];
+            for (int i = 0; i < this.threads; i++) {
+                dmlsPartition[i] = new ArrayList<>();
+                batchExecutors[i] = new BatchExecutor(dataSource);
+                executorThreads[i] = Executors.newSingleThreadExecutor();
+            }
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * 批量同步回调
+     *
+     * @param dmls 批量 DML
+     * @param function 回调方法
+     */
+    public void sync(List<Dml> dmls, Function<Dml, Boolean> function) {
+        try {
+            boolean toExecute = false;
+            for (Dml dml : dmls) {
+                if (!toExecute) {
+                    toExecute = function.apply(dml);
+                } else {
+                    function.apply(dml);
+                }
+            }
+            if (toExecute) {
+                List<Future<Boolean>> futures = new ArrayList<>();
+                for (int i = 0; i < threads; i++) {
+                    int j = i;
+                    futures.add(executorThreads[i].submit(() -> {
+                        try {
+                            dmlsPartition[j]
+                                .forEach(syncItem -> sync(batchExecutors[j], syncItem.config, syncItem.singleDml));
+                            dmlsPartition[j].clear();
+                            batchExecutors[j].commit();
+                            return true;
+                        } catch (Throwable e) {
+                            batchExecutors[j].rollback();
+                            throw new RuntimeException(e);
+                        }
+                    }));
+                }
+
+                futures.forEach(future -> {
+                    try {
+                        future.get();
+                    } catch (ExecutionException | InterruptedException e) {
+                        throw new RuntimeException(e);
+                    }
+                });
+            }
+        } finally {
+            for (BatchExecutor batchExecutor : batchExecutors) {
+                if (batchExecutor != null) {
+                    batchExecutor.close();
+                }
+            }
+        }
+    }
+
+    /**
+     * 批量同步
+     *
+     * @param mappingConfig 配置集合
+     * @param dmls 批量 DML
+     */
+    public void sync(Map<String, Map<String, MappingConfig>> mappingConfig, List<Dml> dmls, Properties envProperties) {
+        sync(dmls, dml -> {
+            if (dml.getIsDdl() != null && dml.getIsDdl() && StringUtils.isNotEmpty(dml.getSql())) {
+                // DDL
+                columnsTypeCache.remove(dml.getDestination() + "." + dml.getDatabase() + "." + dml.getTable());
+                return false;
+            } else {
+                // DML
+                String destination = StringUtils.trimToEmpty(dml.getDestination());
+                String groupId = StringUtils.trimToEmpty(dml.getGroupId());
+                String database = dml.getDatabase();
+                String table = dml.getTable();
+                Map<String, MappingConfig> configMap;
+                if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) {
+                    configMap = mappingConfig.get(destination + "-" + groupId + "_" + database + "-" + table);
+                } else {
+                    configMap = mappingConfig.get(destination + "_" + database + "-" + table);
+                }
+
+                if (configMap == null) {
+                    return false;
+                }
+
+                if (configMap.values().isEmpty()) {
+                    return false;
+                }
+
+                for (MappingConfig config : configMap.values()) {
+                    if (config.getConcurrent()) {
+                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
+                        singleDmls.forEach(singleDml -> {
+                            int hash = pkHash(config.getDbMapping(), singleDml.getData());
+                            SyncItem syncItem = new SyncItem(config, singleDml);
+                            dmlsPartition[hash].add(syncItem);
+                        });
+                    } else {
+                        int hash = 0;
+                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
+                        singleDmls.forEach(singleDml -> {
+                            SyncItem syncItem = new SyncItem(config, singleDml);
+                            dmlsPartition[hash].add(syncItem);
+                        });
+                    }
+                }
+                return true;
+            }
+        });
+    }
+
+    /**
+     * 单条 dml 同步
+     *
+     * @param batchExecutor 批量事务执行器
+     * @param config 对应配置对象
+     * @param dml DML
+     */
+    public void sync(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) {
+        if (config != null) {
+            try {
+                String type = dml.getType();
+                if (type != null && type.equalsIgnoreCase("INSERT")) {
+                    insert(batchExecutor, config, dml);
+                } else if (type != null && type.equalsIgnoreCase("UPDATE")) {
+                    update(batchExecutor, config, dml);
+                } else if (type != null && type.equalsIgnoreCase("DELETE")) {
+                    delete(batchExecutor, config, dml);
+                } else if (type != null && type.equalsIgnoreCase("TRUNCATE")) {
+                    truncate(batchExecutor, config);
+                }
+                if (logger.isDebugEnabled()) {
+                    logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue));
+                }
+            } catch (SQLException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+
+    /**
+     * 插入操作
+     *
+     * @param config 配置项
+     * @param dml DML数据
+     */
+    private void insert(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
+        Map<String, Object> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        DbMapping dbMapping = config.getDbMapping();
+
+        Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
+
+        StringBuilder insertSql = new StringBuilder();
+        insertSql.append("INSERT INTO ").append(SyncUtil.getDbTableName(dbMapping)).append(" (");
+
+        columnsMap.forEach((targetColumnName, srcColumnName) -> insertSql.append(targetColumnName).append(","));
+        int len = insertSql.length();
+        insertSql.delete(len - 1, len).append(") VALUES (");
+        int mapLen = columnsMap.size();
+        for (int i = 0; i < mapLen; i++) {
+            insertSql.append("?,");
+        }
+        len = insertSql.length();
+        insertSql.delete(len - 1, len).append(")");
+
+        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
+
+        List<Map<String, ?>> values = new ArrayList<>();
+        for (Map.Entry<String, String> entry : columnsMap.entrySet()) {
+            String targetColumnName = entry.getKey();
+            String srcColumnName = entry.getValue();
+            if (srcColumnName == null) {
+                srcColumnName = Util.cleanColumn(targetColumnName);
+            }
+
+            Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
+            if (type == null) {
+                throw new RuntimeException("Target column: " + targetColumnName + " not matched");
+            }
+            Object value = data.get(srcColumnName);
+            BatchExecutor.setValue(values, type, value);
+        }
+
+        try {
+            batchExecutor.execute(insertSql.toString(), values);
+        } catch (SQLException e) {
+            if (skipDupException
+                && (e.getMessage().contains("Duplicate entry") || e.getMessage().startsWith("ORA-00001: 违反唯一约束条件"))) {
+                // ignore
+                // TODO 增加更多关系数据库的主键冲突的错误码
+            } else {
+                throw e;
+            }
+        }
+        if (logger.isTraceEnabled()) {
+            logger.trace("Insert into target table, sql: {}", insertSql);
+        }
+
+    }
+
+    /**
+     * 更新操作
+     *
+     * @param config 配置项
+     * @param dml DML数据
+     */
+    private void update(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
+        Map<String, Object> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        Map<String, Object> old = dml.getOld();
+        if (old == null || old.isEmpty()) {
+            return;
+        }
+
+        DbMapping dbMapping = config.getDbMapping();
+
+        Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
+
+        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
+
+        StringBuilder updateSql = new StringBuilder();
+        updateSql.append("UPDATE ").append(SyncUtil.getDbTableName(dbMapping)).append(" SET ");
+        List<Map<String, ?>> values = new ArrayList<>();
+        boolean hasMatched = false;
+        for (String srcColumnName : old.keySet()) {
+            List<String> targetColumnNames = new ArrayList<>();
+            columnsMap.forEach((targetColumn, srcColumn) -> {
+                if (srcColumnName.toLowerCase().equals(srcColumn.toLowerCase())) {
+                    targetColumnNames.add(targetColumn);
+                }
+            });
+            if (!targetColumnNames.isEmpty()) {
+                hasMatched = true;
+                for (String targetColumnName : targetColumnNames) {
+                    updateSql.append(targetColumnName).append("=?, ");
+                    Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
+                    if (type == null) {
+                        throw new RuntimeException("Target column: " + targetColumnName + " not matched");
+                    }
+                    BatchExecutor.setValue(values, type, data.get(srcColumnName));
+                }
+            }
+        }
+        if (!hasMatched) {
+            logger.warn("Did not matched any columns to update ");
+            return;
+        }
+        int len = updateSql.length();
+        updateSql.delete(len - 2, len).append(" WHERE ");
+
+        // 拼接主键
+        appendCondition(dbMapping, updateSql, ctype, values, data, old);
+        batchExecutor.execute(updateSql.toString(), values);
+        if (logger.isTraceEnabled()) {
+            logger.trace("Update target table, sql: {}", updateSql);
+        }
+    }
+
+    /**
+     * 删除操作
+     *
+     * @param config
+     * @param dml
+     */
+    private void delete(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
+        Map<String, Object> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        DbMapping dbMapping = config.getDbMapping();
+
+        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
+
+        StringBuilder sql = new StringBuilder();
+        sql.append("DELETE FROM ").append(SyncUtil.getDbTableName(dbMapping)).append(" WHERE ");
+
+        List<Map<String, ?>> values = new ArrayList<>();
+        // 拼接主键
+        appendCondition(dbMapping, sql, ctype, values, data);
+        batchExecutor.execute(sql.toString(), values);
+        if (logger.isTraceEnabled()) {
+            logger.trace("Delete from target table, sql: {}", sql);
+        }
+    }
+
+    /**
+     * truncate操作
+     *
+     * @param config
+     */
+    private void truncate(BatchExecutor batchExecutor, MappingConfig config) throws SQLException {
+        DbMapping dbMapping = config.getDbMapping();
+        StringBuilder sql = new StringBuilder();
+        sql.append("TRUNCATE TABLE ").append(SyncUtil.getDbTableName(dbMapping));
+        batchExecutor.execute(sql.toString(), new ArrayList<>());
+        if (logger.isTraceEnabled()) {
+            logger.trace("Truncate target table, sql: {}", sql);
+        }
+    }
+
+    /**
+     * 获取目标字段类型
+     *
+     * @param conn sql connection
+     * @param config 映射配置
+     * @return 字段sqlType
+     */
+    private Map<String, Integer> getTargetColumnType(Connection conn, MappingConfig config) {
+        DbMapping dbMapping = config.getDbMapping();
+        String cacheKey = config.getDestination() + "." + dbMapping.getDatabase() + "." + dbMapping.getTable();
+        Map<String, Integer> columnType = columnsTypeCache.get(cacheKey);
+        if (columnType == null) {
+            synchronized (RdbSyncService.class) {
+                columnType = columnsTypeCache.get(cacheKey);
+                if (columnType == null) {
+                    columnType = new LinkedHashMap<>();
+                    final Map<String, Integer> columnTypeTmp = columnType;
+                    String sql = "SELECT * FROM " + SyncUtil.getDbTableName(dbMapping) + " WHERE 1=2";
+                    Util.sqlRS(conn, sql, rs -> {
+                        try {
+                            ResultSetMetaData rsd = rs.getMetaData();
+                            int columnCount = rsd.getColumnCount();
+                            for (int i = 1; i <= columnCount; i++) {
+                                columnTypeTmp.put(rsd.getColumnName(i).toLowerCase(), rsd.getColumnType(i));
+                            }
+                            columnsTypeCache.put(cacheKey, columnTypeTmp);
+                        } catch (SQLException e) {
+                            logger.error(e.getMessage(), e);
+                        }
+                    });
+                }
+            }
+        }
+        return columnType;
+    }
+
+    /**
+     * 拼接主键 where条件
+     */
+    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
+                                 List<Map<String, ?>> values, Map<String, Object> d) {
+        appendCondition(dbMapping, sql, ctype, values, d, null);
+    }
+
+    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
+                                 List<Map<String, ?>> values, Map<String, Object> d, Map<String, Object> o) {
+        // 拼接主键
+        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
+            String targetColumnName = entry.getKey();
+            String srcColumnName = entry.getValue();
+            if (srcColumnName == null) {
+                srcColumnName = Util.cleanColumn(targetColumnName);
+            }
+            sql.append(targetColumnName).append("=? AND ");
+            Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
+            if (type == null) {
+                throw new RuntimeException("Target column: " + targetColumnName + " not matched");
+            }
+            // 如果有修改主键的情况
+            if (o != null && o.containsKey(srcColumnName)) {
+                BatchExecutor.setValue(values, type, o.get(srcColumnName));
+            } else {
+                BatchExecutor.setValue(values, type, d.get(srcColumnName));
+            }
+        }
+        int len = sql.length();
+        sql.delete(len - 4, len);
+    }
+
+    public static class SyncItem {
+
+        private MappingConfig config;
+        private SingleDml     singleDml;
+
+        public SyncItem(MappingConfig config, SingleDml singleDml){
+            this.config = config;
+            this.singleDml = singleDml;
+        }
+    }
+
+    /**
+     * 取主键hash
+     */
+    public int pkHash(DbMapping dbMapping, Map<String, Object> d) {
+        return pkHash(dbMapping, d, null);
+    }
+
+    public int pkHash(DbMapping dbMapping, Map<String, Object> d, Map<String, Object> o) {
+        int hash = 0;
+        // 取主键
+        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
+            String targetColumnName = entry.getKey();
+            String srcColumnName = entry.getValue();
+            if (srcColumnName == null) {
+                srcColumnName = Util.cleanColumn(targetColumnName);
+            }
+            Object value = null;
+            if (o != null && o.containsKey(srcColumnName)) {
+                value = o.get(srcColumnName);
+            } else if (d != null) {
+                value = d.get(srcColumnName);
+            }
+            if (value != null) {
+                hash += value.hashCode();
+            }
+        }
+        hash = Math.abs(hash) % threads;
+        return Math.abs(hash);
+    }
+
+    public void close() {
+        for (int i = 0; i < threads; i++) {
+            executorThreads[i].shutdown();
+        }
+    }
+}

+ 10 - 9
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/support/BatchExecutor.java

@@ -52,16 +52,17 @@ public class BatchExecutor implements Closeable {
     }
 
     public void execute(String sql, List<Map<String, ?>> values) throws SQLException {
-        PreparedStatement pstmt = getConn().prepareStatement(sql);
-        int len = values.size();
-        for (int i = 0; i < len; i++) {
-            int type = (Integer) values.get(i).get("type");
-            Object value = values.get(i).get("value");
-            SyncUtil.setPStmt(type, pstmt, value, i + 1);
-        }
+        try (PreparedStatement pstmt = getConn().prepareStatement(sql)) {
+            int len = values.size();
+            for (int i = 0; i < len; i++) {
+                int type = (Integer) values.get(i).get("type");
+                Object value = values.get(i).get("value");
+                SyncUtil.setPStmt(type, pstmt, value, i + 1);
+            }
 
-        pstmt.execute();
-        idx.incrementAndGet();
+            pstmt.execute();
+            idx.incrementAndGet();
+        }
     }
 
     public void commit() throws SQLException {

+ 2 - 2
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/support/SyncUtil.java

@@ -200,7 +200,7 @@ public class SyncUtil {
                         DateTime dt = new DateTime(v);
                         pstmt.setDate(i, new Date(dt.toDate().getTime()));
                     } else {
-                        pstmt.setNull(i, type);
+                        pstmt.setObject(i, value);
                     }
                 } else {
                     pstmt.setNull(i, type);
@@ -232,7 +232,7 @@ public class SyncUtil {
                         DateTime dt = new DateTime(v);
                         pstmt.setTimestamp(i, new Timestamp(dt.toDate().getTime()));
                     } else {
-                        pstmt.setNull(i, type);
+                        pstmt.setObject(i, value);
                     }
                 } else {
                     pstmt.setNull(i, type);

+ 5 - 3
client-adapter/rdb/src/main/resources/rdb/mytest_user.yml

@@ -1,11 +1,12 @@
 dataSourceKey: defaultDS
 destination: example
-outerAdapterKey: oracle1
+groupId: g1
+outerAdapterKey: mysql1
 concurrent: true
 dbMapping:
   database: mytest
   table: user
-  targetTable: mytest.tb_user
+  targetTable: mytest2.user
   targetPk:
     id: id
 #  mapAll: true
@@ -17,9 +18,10 @@ dbMapping:
     test1:
 
 
-# Mirror schema synchronize config
+## Mirror schema synchronize config
 #dataSourceKey: defaultDS
 #destination: example
+#groupId: g1
 #outerAdapterKey: mysql1
 #concurrent: true
 #dbMapping:

+ 1 - 1
client-adapter/rdb/src/test/java/com/alibaba/otter/canal/client/adapter/rdb/test/ConfigLoadTest.java

@@ -20,7 +20,7 @@ public class ConfigLoadTest {
 
     @Test
     public void testLoad() {
-        Map<String, MappingConfig> configMap =  ConfigLoader.load();
+        Map<String, MappingConfig> configMap =  ConfigLoader.load(null);
 
         Assert.assertFalse(configMap.isEmpty());
     }

+ 1 - 1
client-adapter/rdb/src/test/java/com/alibaba/otter/canal/client/adapter/rdb/test/sync/Common.java

@@ -24,7 +24,7 @@ public class Common {
         outerAdapterConfig.setProperties(properties);
 
         RdbAdapter adapter = new RdbAdapter();
-        adapter.init(outerAdapterConfig);
+        adapter.init(outerAdapterConfig, null);
         return adapter;
     }
 }

+ 325 - 326
client/src/main/java/com/alibaba/otter/canal/client/kafka/KafkaCanalConnector.java

@@ -1,326 +1,325 @@
-package com.alibaba.otter.canal.client.kafka;
-
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeUnit;
-
-import com.google.common.collect.MapMaker;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.TopicPartition;
-import org.apache.kafka.common.serialization.StringDeserializer;
-
-import com.alibaba.fastjson.JSON;
-import com.alibaba.otter.canal.client.CanalMQConnector;
-import com.alibaba.otter.canal.client.impl.SimpleCanalConnector;
-import com.alibaba.otter.canal.protocol.FlatMessage;
-import com.alibaba.otter.canal.protocol.Message;
-import com.alibaba.otter.canal.protocol.exception.CanalClientException;
-import com.google.common.collect.Lists;
-
-/**
- * canal kafka 数据操作客户端
- *
- * <pre>
- * 注意点:
- * 1. 相比于canal {@linkplain SimpleCanalConnector}, 这里get和ack操作不能有并发, 必须是一个线程执行get后,内存里执行完毕ack后再取下一个get
- * </pre>
- *
- * @author machengyuan @ 2018-6-12
- * @version 1.1.1
- */
-public class KafkaCanalConnector implements CanalMQConnector {
-
-    protected KafkaConsumer<String, Message> kafkaConsumer;
-    protected KafkaConsumer<String, String>  kafkaConsumer2;                            // 用于扁平message的数据消费
-    protected String                         topic;
-    protected Integer                        partition;
-    protected Properties                     properties;
-    protected volatile boolean               connected      = false;
-    protected volatile boolean               running        = false;
-    protected boolean                        flatMessage;
-
-    private Map<Integer, Long>               currentOffsets = new ConcurrentHashMap<>();
-
-    public KafkaCanalConnector(String servers, String topic, Integer partition, String groupId, Integer batchSize,
-                               boolean flatMessage){
-        this.topic = topic;
-        this.partition = partition;
-        this.flatMessage = flatMessage;
-
-        properties = new Properties();
-        properties.put("bootstrap.servers", servers);
-        properties.put("group.id", groupId);
-        properties.put("enable.auto.commit", false);
-        properties.put("auto.commit.interval.ms", "1000");
-        properties.put("auto.offset.reset", "latest"); // 如果没有offset则从最后的offset开始读
-        properties.put("request.timeout.ms", "40000"); // 必须大于session.timeout.ms的设置
-        properties.put("session.timeout.ms", "30000"); // 默认为30秒
-        if (batchSize == null) {
-            batchSize = 100;
-        }
-        properties.put("max.poll.records", batchSize.toString());
-        properties.put("key.deserializer", StringDeserializer.class.getName());
-        if (!flatMessage) {
-            properties.put("value.deserializer", MessageDeserializer.class.getName());
-        } else {
-            properties.put("value.deserializer", StringDeserializer.class.getName());
-        }
-    }
-
-    /**
-     * 打开连接
-     */
-    @Override
-    public void connect() {
-        if (connected) {
-            return;
-        }
-
-        connected = true;
-        if (kafkaConsumer == null && !flatMessage) {
-            kafkaConsumer = new KafkaConsumer<String, Message>(properties);
-
-        }
-        if (kafkaConsumer2 == null && flatMessage) {
-            kafkaConsumer2 = new KafkaConsumer<String, String>(properties);
-        }
-    }
-
-    /**
-     * 关闭链接
-     */
-    @Override
-    public void disconnect() {
-        if (kafkaConsumer != null) {
-            kafkaConsumer.close();
-            kafkaConsumer = null;
-        }
-        if (kafkaConsumer2 != null) {
-            kafkaConsumer2.close();
-            kafkaConsumer2 = null;
-        }
-
-        connected = false;
-    }
-
-    protected void waitClientRunning() {
-        running = true;
-    }
-
-    @Override
-    public boolean checkValid() {
-        return true;// 默认都放过
-    }
-
-    /**
-     * 订阅topic
-     */
-    @Override
-    public void subscribe() {
-        waitClientRunning();
-        if (!running) {
-            return;
-        }
-
-        if (partition == null) {
-            if (kafkaConsumer != null) {
-                kafkaConsumer.subscribe(Collections.singletonList(topic));
-            }
-            if (kafkaConsumer2 != null) {
-                kafkaConsumer2.subscribe(Collections.singletonList(topic));
-            }
-        } else {
-            TopicPartition topicPartition = new TopicPartition(topic, partition);
-            if (kafkaConsumer != null) {
-                kafkaConsumer.assign(Collections.singletonList(topicPartition));
-            }
-            if (kafkaConsumer2 != null) {
-                kafkaConsumer2.assign(Collections.singletonList(topicPartition));
-            }
-        }
-    }
-
-    /**
-     * 取消订阅
-     */
-    @Override
-    public void unsubscribe() {
-        waitClientRunning();
-        if (!running) {
-            return;
-        }
-
-        if (kafkaConsumer != null) {
-            kafkaConsumer.unsubscribe();
-        }
-        if (kafkaConsumer2 != null) {
-            kafkaConsumer2.unsubscribe();
-        }
-    }
-
-    @Override
-    public List<Message> getList(Long timeout, TimeUnit unit) throws CanalClientException {
-        waitClientRunning();
-        if (!running) {
-            return Lists.newArrayList();
-        }
-
-        List<Message> messages = getListWithoutAck(timeout, unit);
-        if (messages != null && !messages.isEmpty()) {
-            this.ack();
-        }
-        return messages;
-    }
-
-    @Override
-    public List<Message> getListWithoutAck(Long timeout, TimeUnit unit) throws CanalClientException {
-        waitClientRunning();
-        if (!running) {
-            return Lists.newArrayList();
-        }
-
-        ConsumerRecords<String, Message> records = kafkaConsumer.poll(unit.toMillis(timeout));
-
-        currentOffsets.clear();
-        for (TopicPartition topicPartition : records.partitions()) {
-            currentOffsets.put(topicPartition.partition(), kafkaConsumer.position(topicPartition));
-        }
-
-        if (!records.isEmpty()) {
-            List<Message> messages = new ArrayList<>();
-            for (ConsumerRecord<String, Message> record : records) {
-                messages.add(record.value());
-            }
-            return messages;
-        }
-        return Lists.newArrayList();
-    }
-
-    @Override
-    public List<FlatMessage> getFlatList(Long timeout, TimeUnit unit) throws CanalClientException {
-        waitClientRunning();
-        if (!running) {
-            return Lists.newArrayList();
-        }
-
-        List<FlatMessage> messages = getFlatListWithoutAck(timeout, unit);
-        if (messages != null && !messages.isEmpty()) {
-            this.ack();
-        }
-        return messages;
-    }
-
-    @Override
-    public List<FlatMessage> getFlatListWithoutAck(Long timeout, TimeUnit unit) throws CanalClientException {
-        waitClientRunning();
-        if (!running) {
-            return Lists.newArrayList();
-        }
-
-        ConsumerRecords<String, String> records = kafkaConsumer2.poll(unit.toMillis(timeout));
-
-        currentOffsets.clear();
-        for (TopicPartition topicPartition : records.partitions()) {
-            currentOffsets.put(topicPartition.partition(), kafkaConsumer2.position(topicPartition));
-        }
-
-        if (!records.isEmpty()) {
-            List<FlatMessage> flatMessages = new ArrayList<>();
-            for (ConsumerRecord<String, String> record : records) {
-                String flatMessageJson = record.value();
-                FlatMessage flatMessage = JSON.parseObject(flatMessageJson, FlatMessage.class);
-                flatMessages.add(flatMessage);
-            }
-
-            return flatMessages;
-        }
-        return Lists.newArrayList();
-    }
-
-    @Override
-    public void rollback() {
-        waitClientRunning();
-        if (!running) {
-            return;
-        }
-        // 回滚所有分区
-        if (kafkaConsumer != null) {
-            for (Map.Entry<Integer, Long> entry : currentOffsets.entrySet()) {
-                kafkaConsumer.seek(new TopicPartition(topic, entry.getKey()), entry.getValue() - 1);
-            }
-        }
-        if (kafkaConsumer2 != null) {
-            for (Map.Entry<Integer, Long> entry : currentOffsets.entrySet()) {
-                kafkaConsumer2.seek(new TopicPartition(topic, entry.getKey()), entry.getValue() - 1);
-            }
-        }
-    }
-
-    /**
-     * 提交offset,如果超过 session.timeout.ms 设置的时间没有ack则会抛出异常,ack失败
-     */
-    @Override
-    public void ack() {
-        waitClientRunning();
-        if (!running) {
-            return;
-        }
-
-        if (kafkaConsumer != null) {
-            kafkaConsumer.commitSync();
-        }
-        if (kafkaConsumer2 != null) {
-            kafkaConsumer2.commitSync();
-        }
-    }
-
-    @Override
-    public void subscribe(String filter) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    @Override
-    public Message get(int batchSize) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    @Override
-    public Message get(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    @Override
-    public Message getWithoutAck(int batchSize) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    @Override
-    public Message getWithoutAck(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    @Override
-    public void ack(long batchId) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    @Override
-    public void rollback(long batchId) throws CanalClientException {
-        throw new CanalClientException("mq not support this method");
-    }
-
-    /**
-     * 重新设置sessionTime
-     *
-     * @param timeout
-     * @param unit
-     */
-    public void setSessionTimeout(Long timeout, TimeUnit unit) {
-        long t = unit.toMillis(timeout);
-        properties.put("request.timeout.ms", String.valueOf(t + 60000));
-        properties.put("session.timeout.ms", String.valueOf(t));
-    }
-
-}
+package com.alibaba.otter.canal.client.kafka;
+
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.TopicPartition;
+import org.apache.kafka.common.serialization.StringDeserializer;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.otter.canal.client.CanalMQConnector;
+import com.alibaba.otter.canal.client.impl.SimpleCanalConnector;
+import com.alibaba.otter.canal.protocol.FlatMessage;
+import com.alibaba.otter.canal.protocol.Message;
+import com.alibaba.otter.canal.protocol.exception.CanalClientException;
+import com.google.common.collect.Lists;
+
+/**
+ * canal kafka 数据操作客户端
+ *
+ * <pre>
+ * 注意点:
+ * 1. 相比于canal {@linkplain SimpleCanalConnector}, 这里get和ack操作不能有并发, 必须是一个线程执行get后,内存里执行完毕ack后再取下一个get
+ * </pre>
+ *
+ * @author machengyuan @ 2018-6-12
+ * @version 1.1.1
+ */
+public class KafkaCanalConnector implements CanalMQConnector {
+
+    protected KafkaConsumer<String, Message> kafkaConsumer;
+    protected KafkaConsumer<String, String>  kafkaConsumer2;                            // 用于扁平message的数据消费
+    protected String                         topic;
+    protected Integer                        partition;
+    protected Properties                     properties;
+    protected volatile boolean               connected      = false;
+    protected volatile boolean               running        = false;
+    protected boolean                        flatMessage;
+
+    private Map<Integer, Long>               currentOffsets = new ConcurrentHashMap<>();
+
+    public KafkaCanalConnector(String servers, String topic, Integer partition, String groupId, Integer batchSize,
+                               boolean flatMessage){
+        this.topic = topic;
+        this.partition = partition;
+        this.flatMessage = flatMessage;
+
+        properties = new Properties();
+        properties.put("bootstrap.servers", servers);
+        properties.put("group.id", groupId);
+        properties.put("enable.auto.commit", false);
+        properties.put("auto.commit.interval.ms", "1000");
+        properties.put("auto.offset.reset", "latest"); // 如果没有offset则从最后的offset开始读
+        properties.put("request.timeout.ms", "40000"); // 必须大于session.timeout.ms的设置
+        properties.put("session.timeout.ms", "30000"); // 默认为30秒
+        if (batchSize == null) {
+            batchSize = 100;
+        }
+        properties.put("max.poll.records", batchSize.toString());
+        properties.put("key.deserializer", StringDeserializer.class.getName());
+        if (!flatMessage) {
+            properties.put("value.deserializer", MessageDeserializer.class.getName());
+        } else {
+            properties.put("value.deserializer", StringDeserializer.class.getName());
+        }
+    }
+
+    /**
+     * 打开连接
+     */
+    @Override
+    public void connect() {
+        if (connected) {
+            return;
+        }
+
+        connected = true;
+        if (kafkaConsumer == null && !flatMessage) {
+            kafkaConsumer = new KafkaConsumer<String, Message>(properties);
+
+        }
+        if (kafkaConsumer2 == null && flatMessage) {
+            kafkaConsumer2 = new KafkaConsumer<String, String>(properties);
+        }
+    }
+
+    /**
+     * 关闭链接
+     */
+    @Override
+    public void disconnect() {
+        if (kafkaConsumer != null) {
+            kafkaConsumer.close();
+            kafkaConsumer = null;
+        }
+        if (kafkaConsumer2 != null) {
+            kafkaConsumer2.close();
+            kafkaConsumer2 = null;
+        }
+
+        connected = false;
+    }
+
+    protected void waitClientRunning() {
+        running = true;
+    }
+
+    @Override
+    public boolean checkValid() {
+        return true;// 默认都放过
+    }
+
+    /**
+     * 订阅topic
+     */
+    @Override
+    public void subscribe() {
+        waitClientRunning();
+        if (!running) {
+            return;
+        }
+
+        if (partition == null) {
+            if (kafkaConsumer != null) {
+                kafkaConsumer.subscribe(Collections.singletonList(topic));
+            }
+            if (kafkaConsumer2 != null) {
+                kafkaConsumer2.subscribe(Collections.singletonList(topic));
+            }
+        } else {
+            TopicPartition topicPartition = new TopicPartition(topic, partition);
+            if (kafkaConsumer != null) {
+                kafkaConsumer.assign(Collections.singletonList(topicPartition));
+            }
+            if (kafkaConsumer2 != null) {
+                kafkaConsumer2.assign(Collections.singletonList(topicPartition));
+            }
+        }
+    }
+
+    /**
+     * 取消订阅
+     */
+    @Override
+    public void unsubscribe() {
+        waitClientRunning();
+        if (!running) {
+            return;
+        }
+
+        if (kafkaConsumer != null) {
+            kafkaConsumer.unsubscribe();
+        }
+        if (kafkaConsumer2 != null) {
+            kafkaConsumer2.unsubscribe();
+        }
+    }
+
+    @Override
+    public List<Message> getList(Long timeout, TimeUnit unit) throws CanalClientException {
+        waitClientRunning();
+        if (!running) {
+            return Lists.newArrayList();
+        }
+
+        List<Message> messages = getListWithoutAck(timeout, unit);
+        if (messages != null && !messages.isEmpty()) {
+            this.ack();
+        }
+        return messages;
+    }
+
+    @Override
+    public List<Message> getListWithoutAck(Long timeout, TimeUnit unit) throws CanalClientException {
+        waitClientRunning();
+        if (!running) {
+            return Lists.newArrayList();
+        }
+
+        ConsumerRecords<String, Message> records = kafkaConsumer.poll(unit.toMillis(timeout));
+
+        currentOffsets.clear();
+        for (TopicPartition topicPartition : records.partitions()) {
+            currentOffsets.put(topicPartition.partition(), kafkaConsumer.position(topicPartition));
+        }
+
+        if (!records.isEmpty()) {
+            List<Message> messages = new ArrayList<>();
+            for (ConsumerRecord<String, Message> record : records) {
+                messages.add(record.value());
+            }
+            return messages;
+        }
+        return Lists.newArrayList();
+    }
+
+    @Override
+    public List<FlatMessage> getFlatList(Long timeout, TimeUnit unit) throws CanalClientException {
+        waitClientRunning();
+        if (!running) {
+            return Lists.newArrayList();
+        }
+
+        List<FlatMessage> messages = getFlatListWithoutAck(timeout, unit);
+        if (messages != null && !messages.isEmpty()) {
+            this.ack();
+        }
+        return messages;
+    }
+
+    @Override
+    public List<FlatMessage> getFlatListWithoutAck(Long timeout, TimeUnit unit) throws CanalClientException {
+        waitClientRunning();
+        if (!running) {
+            return Lists.newArrayList();
+        }
+
+        ConsumerRecords<String, String> records = kafkaConsumer2.poll(unit.toMillis(timeout));
+
+        currentOffsets.clear();
+        for (TopicPartition topicPartition : records.partitions()) {
+            currentOffsets.put(topicPartition.partition(), kafkaConsumer2.position(topicPartition));
+        }
+
+        if (!records.isEmpty()) {
+            List<FlatMessage> flatMessages = new ArrayList<>();
+            for (ConsumerRecord<String, String> record : records) {
+                String flatMessageJson = record.value();
+                FlatMessage flatMessage = JSON.parseObject(flatMessageJson, FlatMessage.class);
+                flatMessages.add(flatMessage);
+            }
+
+            return flatMessages;
+        }
+        return Lists.newArrayList();
+    }
+
+    @Override
+    public void rollback() {
+        waitClientRunning();
+        if (!running) {
+            return;
+        }
+        // 回滚所有分区
+        if (kafkaConsumer != null) {
+            for (Map.Entry<Integer, Long> entry : currentOffsets.entrySet()) {
+                kafkaConsumer.seek(new TopicPartition(topic, entry.getKey()), entry.getValue() - 1);
+            }
+        }
+        if (kafkaConsumer2 != null) {
+            for (Map.Entry<Integer, Long> entry : currentOffsets.entrySet()) {
+                kafkaConsumer2.seek(new TopicPartition(topic, entry.getKey()), entry.getValue() - 1);
+            }
+        }
+    }
+
+    /**
+     * 提交offset,如果超过 session.timeout.ms 设置的时间没有ack则会抛出异常,ack失败
+     */
+    @Override
+    public void ack() {
+        waitClientRunning();
+        if (!running) {
+            return;
+        }
+
+        if (kafkaConsumer != null) {
+            kafkaConsumer.commitSync();
+        }
+        if (kafkaConsumer2 != null) {
+            kafkaConsumer2.commitSync();
+        }
+    }
+
+    @Override
+    public void subscribe(String filter) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    @Override
+    public Message get(int batchSize) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    @Override
+    public Message get(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    @Override
+    public Message getWithoutAck(int batchSize) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    @Override
+    public Message getWithoutAck(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    @Override
+    public void ack(long batchId) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    @Override
+    public void rollback(long batchId) throws CanalClientException {
+        throw new CanalClientException("mq not support this method");
+    }
+
+    /**
+     * 重新设置sessionTime
+     *
+     * @param timeout
+     * @param unit
+     */
+    public void setSessionTimeout(Long timeout, TimeUnit unit) {
+        long t = unit.toMillis(timeout);
+        properties.put("request.timeout.ms", String.valueOf(t + 60000));
+        properties.put("session.timeout.ms", String.valueOf(t));
+    }
+
+}

+ 35 - 32
client/src/main/java/com/alibaba/otter/canal/client/kafka/protocol/KafkaFlatMessage.java

@@ -1,32 +1,35 @@
-package com.alibaba.otter.canal.client.kafka.protocol;
-
-import com.alibaba.otter.canal.protocol.FlatMessage;
-import org.springframework.beans.BeanUtils;
-
-/**
- * 消息对象(Kafka)
- *
- * @Author panjianping
- * @Email ipanjianping@qq.com
- * @Date 2018/12/17
- */
-public class KafkaFlatMessage extends FlatMessage {
-    /**
-     * Kafka 消息 offset
-     */
-    private long offset;
-
-    public KafkaFlatMessage(FlatMessage message, long offset) {
-        super(message.getId());
-        BeanUtils.copyProperties(message, this);
-        this.offset = offset;
-    }
-
-    public long getOffset() {
-        return offset;
-    }
-
-    public void setOffset(long offset) {
-        this.offset = offset;
-    }
-}
+package com.alibaba.otter.canal.client.kafka.protocol;
+
+import com.alibaba.otter.canal.protocol.FlatMessage;
+import org.springframework.beans.BeanUtils;
+
+/**
+ * 消息对象(Kafka)
+ *
+ * @Author panjianping
+ * @Email ipanjianping@qq.com
+ * @Date 2018/12/17
+ */
+public class KafkaFlatMessage extends FlatMessage {
+
+    private static final long serialVersionUID = 5748024400508080710L;
+
+    /**
+     * Kafka 消息 offset
+     */
+    private long              offset;
+
+    public KafkaFlatMessage(FlatMessage message, long offset){
+        super(message.getId());
+        BeanUtils.copyProperties(message, this);
+        this.offset = offset;
+    }
+
+    public long getOffset() {
+        return offset;
+    }
+
+    public void setOffset(long offset) {
+        this.offset = offset;
+    }
+}

+ 35 - 33
client/src/main/java/com/alibaba/otter/canal/client/kafka/protocol/KafkaMessage.java

@@ -1,33 +1,35 @@
-package com.alibaba.otter.canal.client.kafka.protocol;
-
-import com.alibaba.otter.canal.protocol.Message;
-import org.springframework.beans.BeanUtils;
-
-/**
- * 消息对象(Kafka)
- *
- * @Author panjianping
- * @Email ipanjianping@qq.com
- * @Date 2018/12/17
- */
-public class KafkaMessage extends Message {
-    /**
-     * Kafka 消息 offset
-     */
-    private long offset;
-
-    public KafkaMessage(Message message, long offset) {
-        super(message.getId());
-        BeanUtils.copyProperties(message, this);
-        this.offset = offset;
-    }
-
-
-    public long getOffset() {
-        return offset;
-    }
-
-    public void setOffset(long offset) {
-        this.offset = offset;
-    }
-}
+package com.alibaba.otter.canal.client.kafka.protocol;
+
+import com.alibaba.otter.canal.protocol.Message;
+import org.springframework.beans.BeanUtils;
+
+/**
+ * 消息对象(Kafka)
+ *
+ * @Author panjianping
+ * @Email ipanjianping@qq.com
+ * @Date 2018/12/17
+ */
+public class KafkaMessage extends Message {
+
+    private static final long serialVersionUID = -293120358490119447L;
+
+    /**
+     * Kafka 消息 offset
+     */
+    private long              offset;
+
+    public KafkaMessage(Message message, long offset){
+        super(message.getId());
+        BeanUtils.copyProperties(message, this);
+        this.offset = offset;
+    }
+
+    public long getOffset() {
+        return offset;
+    }
+
+    public void setOffset(long offset) {
+        this.offset = offset;
+    }
+}

+ 53 - 0
common/src/main/java/com/alibaba/otter/canal/common/utils/CommonUtils.java

@@ -0,0 +1,53 @@
+package com.alibaba.otter.canal.common.utils;
+
+import java.io.File;
+
+/**
+ * 通用工具类
+ *
+ * @author rewerma 2019-01-25 下午05:20:16
+ * @version 1.0.0
+ */
+public class CommonUtils {
+
+    /**
+     * 获取conf文件夹所在路径
+     *
+     * @return 路径地址
+     */
+    public static String getConfPath() {
+        String classpath = CommonUtils.class.getResource("/").getPath();
+        String confPath = classpath + "../conf/";
+        if (new File(confPath).exists()) {
+            return confPath;
+        } else {
+            return classpath;
+        }
+    }
+
+    /**
+     * 删除文件夹
+     *
+     * @param dirFile 文件夹对象
+     * @return 是否删除成功
+     */
+    public static boolean deleteDir(File dirFile) {
+        if (!dirFile.exists()) {
+            return false;
+        }
+
+        if (dirFile.isFile()) {
+            return dirFile.delete();
+        } else {
+            File[] files = dirFile.listFiles();
+            if (files == null || files.length == 0) {
+                return dirFile.delete();
+            }
+            for (File file : files) {
+                deleteDir(file);
+            }
+        }
+
+        return dirFile.delete();
+    }
+}

+ 1 - 0
deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalConstants.java

@@ -51,6 +51,7 @@ public class CanalConstants {
     public static final String CANAL_MQ_TRANSACTION              = ROOT + "." + "mq.transaction";
     public static final String CANAL_ALIYUN_ACCESSKEY            = ROOT + "." + "aliyun.accessKey";
     public static final String CANAL_ALIYUN_SECRETKEY            = ROOT + "." + "aliyun.secretKey";
+    public static final String CANAL_MQ_PROPERTIES               = ROOT + "." + "mq.properties";
 
     public static String getInstanceModeKey(String destination) {
         return MessageFormat.format(INSTANCE_MODE_TEMPLATE, destination);

+ 11 - 1
deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalStater.java

@@ -1,5 +1,6 @@
 package com.alibaba.otter.canal.deployer;
 
+import java.util.Map;
 import java.util.Properties;
 
 import org.apache.commons.lang.StringUtils;
@@ -162,7 +163,16 @@ public class CanalStater {
         if (!StringUtils.isEmpty(transaction)) {
             mqProperties.setTransaction(Boolean.valueOf(transaction));
         }
+
+        for (Object key : properties.keySet()) {
+            key = StringUtils.trim(key.toString());
+            if (((String) key).startsWith(CanalConstants.CANAL_MQ_PROPERTIES)) {
+                String value = CanalController.getProperty(properties, (String) key);
+                String subKey = ((String) key).substring(CanalConstants.CANAL_MQ_PROPERTIES.length() + 1);
+                mqProperties.getProperties().put(subKey, value);
+            }
+        }
+
         return mqProperties;
     }
-
 }

+ 2 - 71
deployer/src/main/java/com/alibaba/otter/canal/deployer/monitor/remote/DbRemoteConfigLoader.java

@@ -1,7 +1,6 @@
 package com.alibaba.otter.canal.deployer.monitor.remote;
 
 import java.io.ByteArrayInputStream;
-import java.io.File;
 import java.io.FileWriter;
 import java.nio.charset.StandardCharsets;
 import java.sql.Connection;
@@ -18,6 +17,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.otter.canal.common.utils.CommonUtils;
 import com.alibaba.otter.canal.common.utils.NamedThreadFactory;
 import com.alibaba.otter.canal.deployer.CanalConstants;
 import com.google.common.base.Joiner;
@@ -101,7 +101,7 @@ public class DbRemoteConfigLoader implements RemoteConfigLoader {
      * @param content 远程配置内容文本
      */
     private void overrideLocalCanalConfig(String content) {
-        try (FileWriter writer = new FileWriter(getConfPath() + "canal.properties")) {
+        try (FileWriter writer = new FileWriter(CommonUtils.getConfPath() + "canal.properties")) {
             writer.write(content);
             writer.flush();
         } catch (Exception e) {
@@ -213,26 +213,6 @@ public class DbRemoteConfigLoader implements RemoteConfigLoader {
         }
     }
 
-    private static boolean deleteDir(File dirFile) {
-        if (!dirFile.exists()) {
-            return false;
-        }
-
-        if (dirFile.isFile()) {
-            return dirFile.delete();
-        } else {
-            File[] files = dirFile.listFiles();
-            if (files == null || files.length == 0) {
-                return dirFile.delete();
-            }
-            for (File file : files) {
-                deleteDir(file);
-            }
-        }
-
-        return dirFile.delete();
-    }
-
     /**
      * 监听 canal 主配置和 instance 配置变化
      *
@@ -281,53 +261,4 @@ public class DbRemoteConfigLoader implements RemoteConfigLoader {
         }
     }
 
-    /**
-     * 获取conf文件夹所在路径
-     *
-     * @return 路径地址
-     */
-    private String getConfPath() {
-        String classpath = this.getClass().getResource("/").getPath();
-        String confPath = classpath + ".." + File.separator + "conf" + File.separator;
-        if (new File(confPath).exists()) {
-            return confPath;
-        } else {
-            return classpath;
-        }
-    }
-
-    /**
-     * 远程xxx/instance.properties配置监听器实现
-     */
-    private class RemoteInstanceMonitorImpl implements RemoteInstanceMonitor {
-
-        @Override
-        public void onAdd(ConfigItem configItem) {
-            this.onModify(configItem);
-        }
-
-        @Override
-        public void onModify(ConfigItem configItem) {
-            File instanceDir = new File(getConfPath() + configItem.getName());
-            if (!instanceDir.exists()) {
-                instanceDir.mkdirs();
-            }
-            try (FileWriter writer = new FileWriter(getConfPath() + configItem.getName() + "/instance.properties")) {
-                writer.write(configItem.getContent());
-                writer.flush();
-                logger.info("## Loaded remote instance config: {}/instance.properties ", configItem.getName());
-            } catch (Exception e) {
-                logger.error(e.getMessage(), e);
-            }
-        }
-
-        @Override
-        public void onDelete(String instanceName) {
-            File file = new File(getConfPath() + instanceName + "/");
-            if (file.exists()) {
-                deleteDir(file);
-                logger.info("## Deleted and loaded remote instance config: {} ", instanceName);
-            }
-        }
-    }
 }

+ 55 - 0
deployer/src/main/java/com/alibaba/otter/canal/deployer/monitor/remote/RemoteInstanceMonitorImpl.java

@@ -0,0 +1,55 @@
+package com.alibaba.otter.canal.deployer.monitor.remote;
+
+import java.io.File;
+import java.io.FileWriter;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.otter.canal.common.utils.CommonUtils;
+
+/**
+ * 远程xxx/instance.properties配置监听器实现
+ *
+ * @author rewerma 2019-01-25 下午05:20:16
+ * @version 1.0.0
+ */
+public class RemoteInstanceMonitorImpl implements RemoteInstanceMonitor {
+
+    private static final Logger logger = LoggerFactory.getLogger(RemoteInstanceMonitorImpl.class);
+
+    @Override
+    public void onAdd(ConfigItem configItem) {
+        this.onModify(configItem);
+    }
+
+    @Override
+    public void onModify(ConfigItem configItem) {
+        String confDir = CommonUtils.getConfPath() + configItem.getName();
+        File instanceDir = new File(confDir);
+        if (!instanceDir.exists()) {
+            boolean mkDirs = instanceDir.mkdirs();
+            if (!mkDirs) {
+                logger.info("## Error to create instance config dir: {}", configItem.getName());
+                return;
+            }
+        }
+        try (FileWriter writer = new FileWriter(confDir + "/instance.properties")) {
+            writer.write(configItem.getContent());
+            writer.flush();
+            logger.info("## Loaded remote instance config: {}/instance.properties ", configItem.getName());
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    @Override
+    public void onDelete(String instanceName) {
+        File file = new File(CommonUtils.getConfPath() + instanceName + "/");
+        if (file.exists()) {
+            CommonUtils.deleteDir(file);
+            logger.info("## Deleted and loaded remote instance config: {} ", instanceName);
+        }
+    }
+
+}

+ 2 - 1
deployer/src/main/resources/canal.properties

@@ -116,4 +116,5 @@ canal.mq.flatMessage = true
 canal.mq.compressionType = none
 canal.mq.acks = all
 # use transaction for kafka flatMessage batch produce
-canal.mq.transaction = false
+canal.mq.transaction = false
+#canal.mq.properties. =

+ 6 - 0
filter/src/test/java/com/alibaba/otter/canal/filter/AviaterFilterTest.java

@@ -50,6 +50,12 @@ public class AviaterFilterTest {
         result = filter.filter("s3.t2");
         Assert.assertEquals(false, result);
 
+        result = filter.filter("S1.S2");
+        Assert.assertEquals(true, result);
+
+        result = filter.filter("S2.S1");
+        Assert.assertEquals(true, result);
+
         AviaterRegexFilter filter2 = new AviaterRegexFilter("s1\\..*,s2.t1");
 
         result = filter2.filter("s1.t1");

+ 3 - 1
instance/manager/src/main/java/com/alibaba/otter/canal/instance/manager/CanalInstanceWithManager.java

@@ -59,7 +59,7 @@ import com.alibaba.otter.canal.store.model.BatchMode;
 
 /**
  * 单个canal实例,比如一个destination会独立一个实例
- * 
+ *
  * @author jianghang 2012-7-11 下午09:26:51
  * @version 1.0.0
  */
@@ -294,6 +294,7 @@ public class CanalInstanceWithManager extends AbstractCanalInstance {
             mysqlEventParser.setFallbackIntervalInSeconds(parameters.getFallbackIntervalInSeconds());
             mysqlEventParser.setProfilingEnabled(false);
             mysqlEventParser.setFilterTableError(parameters.getFilterTableError());
+            mysqlEventParser.setParallel(parameters.getParallel());
             mysqlEventParser.setIsGTIDMode(BooleanUtils.toBoolean(parameters.getGtidEnable()));
             // tsdb
             if (parameters.getTsdbSnapshotInterval() != null) {
@@ -340,6 +341,7 @@ public class CanalInstanceWithManager extends AbstractCanalInstance {
             localBinlogEventParser.setDetectingEnable(parameters.getDetectingEnable());
             localBinlogEventParser.setDetectingIntervalInSeconds(parameters.getDetectingIntervalInSeconds());
             localBinlogEventParser.setFilterTableError(parameters.getFilterTableError());
+            localBinlogEventParser.setParallel(parameters.getParallel());
             // 数据库信息,反查表结构时需要
             if (!CollectionUtils.isEmpty(dbAddresses)) {
                 localBinlogEventParser.setMasterInfo(new AuthenticationInfo(dbAddresses.get(0),

+ 11 - 2
instance/manager/src/main/java/com/alibaba/otter/canal/instance/manager/model/CanalParameter.java

@@ -12,7 +12,7 @@ import com.alibaba.otter.canal.common.utils.CanalToStringStyle;
 
 /**
  * canal运行相关参数
- * 
+ *
  * @author jianghang 2012-7-4 下午02:52:52
  * @version 1.0.0
  */
@@ -118,6 +118,7 @@ public class CanalParameter implements Serializable {
     private String                   standbyLogfileName                 = null;                      // standby起始位置
     private Long                     standbyLogfileOffest               = null;
     private Long                     standbyTimestamp                   = null;
+    private Boolean                  parallel                           = Boolean.FALSE;
 
     public static enum RunMode {
 
@@ -328,7 +329,7 @@ public class CanalParameter implements Serializable {
 
     /**
      * 数据来源描述
-     * 
+     *
      * @author jianghang 2012-12-26 上午11:05:20
      * @version 4.1.5
      */
@@ -982,6 +983,14 @@ public class CanalParameter implements Serializable {
         this.tsdbSnapshotExpire = tsdbSnapshotExpire;
     }
 
+    public Boolean getParallel() {
+        return parallel;
+    }
+
+    public void setParallel(Boolean parallel) {
+        this.parallel = parallel;
+    }
+
     public String toString() {
         return ToStringBuilder.reflectionToString(this, CanalToStringStyle.DEFAULT_STYLE);
     }

+ 7 - 8
parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlConnection.java

@@ -514,15 +514,14 @@ public class MysqlConnection implements ErosaConnection {
         ResultSetPacket rs = null;
         try {
             rs = query("select @@global.binlog_checksum");
+            List<String> columnValues = rs.getFieldValues();
+            if (columnValues != null && columnValues.size() >= 1 && columnValues.get(0).toUpperCase().equals("CRC32")) {
+                binlogChecksum = LogEvent.BINLOG_CHECKSUM_ALG_CRC32;
+            } else {
+                binlogChecksum = LogEvent.BINLOG_CHECKSUM_ALG_OFF;
+            }
         } catch (Throwable e) {
-            // ignore
-            return;
-        }
-
-        List<String> columnValues = rs.getFieldValues();
-        if (columnValues != null && columnValues.size() >= 1 && columnValues.get(0).toUpperCase().equals("CRC32")) {
-            binlogChecksum = LogEvent.BINLOG_CHECKSUM_ALG_CRC32;
-        } else {
+            logger.error("", e);
             binlogChecksum = LogEvent.BINLOG_CHECKSUM_ALG_OFF;
         }
     }

+ 35 - 45
server/src/main/java/com/alibaba/otter/canal/common/MQMessageUtils.java

@@ -1,6 +1,12 @@
 package com.alibaba.otter.canal.common;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 import org.apache.commons.lang.StringUtils;
 
@@ -24,46 +30,36 @@ import com.google.protobuf.InvalidProtocolBufferException;
 public class MQMessageUtils {
 
     @SuppressWarnings("deprecation")
-    private static Map<String, List<PartitionData>>    partitionDatas    = MigrateMap
-        .makeComputingMap(new MapMaker().softValues(), new Function<String, List<PartitionData>>() {
+    private static Map<String, List<PartitionData>>    partitionDatas    = MigrateMap.makeComputingMap(new MapMaker().softValues(),
+                                                                             new Function<String, List<PartitionData>>() {
 
                                                                                  public List<PartitionData> apply(String pkHashConfigs) {
-                                                                                     List<PartitionData> datas = Lists
-                                                                                         .newArrayList();
-                                                                                     String[] pkHashConfigArray = StringUtils
-                                                                                         .split(pkHashConfigs, ",");
+                                                                                     List<PartitionData> datas = Lists.newArrayList();
+                                                                                     String[] pkHashConfigArray = StringUtils.split(pkHashConfigs,
+                                                                                         ",");
                                                                                      // schema.table:id^name
                                                                                      for (String pkHashConfig : pkHashConfigArray) {
                                                                                          PartitionData data = new PartitionData();
-                                                                                         int i = pkHashConfig
-                                                                                             .lastIndexOf(":");
+                                                                                         int i = pkHashConfig.lastIndexOf(":");
                                                                                          if (i > 0) {
-                                                                                             String pkStr = pkHashConfig
-                                                                                                 .substring(i + 1);
-                                                                                             if (pkStr.equalsIgnoreCase(
-                                                                                                 "$pk$")) {
+                                                                                             String pkStr = pkHashConfig.substring(i + 1);
+                                                                                             if (pkStr.equalsIgnoreCase("$pk$")) {
                                                                                                  data.hashMode.autoPkHash = true;
                                                                                              } else {
-                                                                                                 data.hashMode.pkNames = Lists
-                                                                                                     .newArrayList(
-                                                                                                         StringUtils
-                                                                                                             .split(
-                                                                                                                 pkStr,
-                                                                                                                 '^'));
+                                                                                                 data.hashMode.pkNames = Lists.newArrayList(StringUtils.split(pkStr,
+                                                                                                     '^'));
                                                                                              }
 
-                                                                                             pkHashConfig = pkHashConfig
-                                                                                                 .substring(0, i);
+                                                                                             pkHashConfig = pkHashConfig.substring(0,
+                                                                                                 i);
                                                                                          } else {
                                                                                              data.hashMode.tableHash = true;
                                                                                          }
 
-                                                                                         if (!isWildCard(
-                                                                                             pkHashConfig)) {
+                                                                                         if (!isWildCard(pkHashConfig)) {
                                                                                              data.simpleName = pkHashConfig;
                                                                                          } else {
-                                                                                             data.regexFilter = new AviaterRegexFilter(
-                                                                                                 pkHashConfig);
+                                                                                             data.regexFilter = new AviaterRegexFilter(pkHashConfig);
                                                                                          }
                                                                                          datas.add(data);
                                                                                      }
@@ -73,29 +69,24 @@ public class MQMessageUtils {
                                                                              });
 
     @SuppressWarnings("deprecation")
-    private static Map<String, List<DynamicTopicData>> dynamicTopicDatas = MigrateMap
-        .makeComputingMap(new MapMaker().softValues(), new Function<String, List<DynamicTopicData>>() {
+    private static Map<String, List<DynamicTopicData>> dynamicTopicDatas = MigrateMap.makeComputingMap(new MapMaker().softValues(),
+                                                                             new Function<String, List<DynamicTopicData>>() {
 
                                                                                  public List<DynamicTopicData> apply(String pkHashConfigs) {
-                                                                                     List<DynamicTopicData> datas = Lists
-                                                                                         .newArrayList();
-                                                                                     String[] dynamicTopicArray = StringUtils
-                                                                                         .split(pkHashConfigs, ",");
+                                                                                     List<DynamicTopicData> datas = Lists.newArrayList();
+                                                                                     String[] dynamicTopicArray = StringUtils.split(pkHashConfigs,
+                                                                                         ",");
                                                                                      // schema.table
                                                                                      for (String dynamicTopic : dynamicTopicArray) {
                                                                                          DynamicTopicData data = new DynamicTopicData();
 
-                                                                                         if (!isWildCard(
-                                                                                             dynamicTopic)) {
+                                                                                         if (!isWildCard(dynamicTopic)) {
                                                                                              data.simpleName = dynamicTopic;
                                                                                          } else {
-                                                                                             if (dynamicTopic
-                                                                                                 .contains("\\.")) {
-                                                                                                 data.tableRegexFilter = new AviaterRegexFilter(
-                                                                                                     dynamicTopic);
+                                                                                             if (dynamicTopic.contains("\\.")) {
+                                                                                                 data.tableRegexFilter = new AviaterRegexFilter(dynamicTopic);
                                                                                              } else {
-                                                                                                 data.schemaRegexFilter = new AviaterRegexFilter(
-                                                                                                     dynamicTopic);
+                                                                                                 data.schemaRegexFilter = new AviaterRegexFilter(dynamicTopic);
                                                                                              }
                                                                                          }
                                                                                          datas.add(data);
@@ -291,9 +282,8 @@ public class MQMessageUtils {
                 try {
                     rowChange = CanalEntry.RowChange.parseFrom(entry.getStoreValue());
                 } catch (Exception e) {
-                    throw new RuntimeException(
-                        "ERROR ## parser of eromanga-event has an error , data:" + entry.toString(),
-                        e);
+                    throw new RuntimeException("ERROR ## parser of eromanga-event has an error , data:"
+                                               + entry.toString(), e);
                 }
 
                 CanalEntry.EventType eventType = rowChange.getEventType();
@@ -417,9 +407,9 @@ public class MQMessageUtils {
                         pkNames = flatMessage.getPkNames();
                     }
 
-                    int hashCode = table.hashCode();
                     int idx = 0;
                     for (Map<String, String> row : flatMessage.getData()) {
+                        int hashCode = table.hashCode();
                         if (!hashMode.tableHash) {
                             for (String pkName : pkNames) {
                                 String value = row.get(pkName);
@@ -555,8 +545,8 @@ public class MQMessageUtils {
 
     private static boolean isWildCard(String value) {
        // not contains '.' ?
-        return StringUtils.containsAny(value,
-            new char[] { '*', '?', '+', '|', '(', ')', '{', '}', '[', ']', '\\', '$', '^' });
+        return StringUtils.containsAny(value, new char[] { '*', '?', '+', '|', '(', ')', '{', '}', '[', ']', '\\', '$',
+                '^' });
     }
 
     private static void put2MapMessage(Map<String, Message> messageMap, Long messageId, String topicName,

+ 27 - 16
server/src/main/java/com/alibaba/otter/canal/common/MQProperties.java

@@ -1,5 +1,7 @@
 package com.alibaba.otter.canal.common;
 
+import java.util.Properties;
+
 /**
  * kafka 配置项
  *
@@ -8,22 +10,23 @@ package com.alibaba.otter.canal.common;
  */
 public class MQProperties {
 
-    private String  servers                = "127.0.0.1:6667";
-    private int     retries                = 0;
-    private int     batchSize              = 16384;
-    private int     lingerMs               = 1;
-    private int     maxRequestSize         = 1048576;
-    private long    bufferMemory           = 33554432L;
-    private boolean filterTransactionEntry = true;
-    private String  producerGroup          = "Canal-Producer";
-    private int     canalBatchSize         = 50;
-    private Long    canalGetTimeout        = 100L;
-    private boolean flatMessage            = true;
-    private String  compressionType        = "none";
-    private String  acks                   = "all";
-    private String  aliyunAccessKey        = "";
-    private String  aliyunSecretKey        = "";
-    private boolean transaction            = false;           // 是否开启事务
+    private String     servers                = "127.0.0.1:6667";
+    private int        retries                = 0;
+    private int        batchSize              = 16384;
+    private int        lingerMs               = 1;
+    private int        maxRequestSize         = 1048576;
+    private long       bufferMemory           = 33554432L;
+    private boolean    filterTransactionEntry = true;
+    private String     producerGroup          = "Canal-Producer";
+    private int        canalBatchSize         = 50;
+    private Long       canalGetTimeout        = 100L;
+    private boolean    flatMessage            = true;
+    private String     compressionType        = "none";
+    private String     acks                   = "all";
+    private String     aliyunAccessKey        = "";
+    private String     aliyunSecretKey        = "";
+    private boolean    transaction            = false;           // 是否开启事务
+    private Properties properties             = new Properties();
 
     public static class CanalDestination {
 
@@ -210,4 +213,12 @@ public class MQProperties {
     public void setTransaction(boolean transaction) {
         this.transaction = transaction;
     }
+
+    public Properties getProperties() {
+        return properties;
+    }
+
+    public void setProperties(Properties properties) {
+        this.properties = properties;
+    }
 }

+ 6 - 1
server/src/main/java/com/alibaba/otter/canal/kafka/CanalKafkaProducer.java

@@ -49,7 +49,12 @@ public class CanalKafkaProducer implements CanalMQProducer {
         properties.put("max.request.size", kafkaProperties.getMaxRequestSize());
         properties.put("buffer.memory", kafkaProperties.getBufferMemory());
         properties.put("key.serializer", StringSerializer.class.getName());
-        if(kafkaProperties.getTransaction()){
+
+        if (!kafkaProperties.getProperties().isEmpty()) {
+            properties.putAll(kafkaProperties.getProperties());
+        }
+
+        if (kafkaProperties.getTransaction()) {
             properties.put("transactional.id", "canal-transactional-id");
         } else {
             properties.put("retries", kafkaProperties.getRetries());

+ 12 - 4
server/src/main/java/com/alibaba/otter/canal/server/CanalMQStarter.java

@@ -133,8 +133,14 @@ public class CanalMQStarter {
     }
 
     private void worker(String destination, AtomicBoolean destinationRunning) {
-        while (!running || !destinationRunning.get())
-            ;
+        while (!running || !destinationRunning.get()) {
+            try {
+                Thread.sleep(100);
+            } catch (InterruptedException e) {
+                // ignore
+            }
+        }
+
         logger.info("## start the MQ producer: {}.", destination);
 
         final ClientIdentity clientIdentity = new ClientIdentity(destination, (short) 1001, "");
@@ -166,8 +172,10 @@ public class CanalMQStarter {
                 while (running && destinationRunning.get()) {
                     Message message;
                     if (getTimeout != null && getTimeout > 0) {
-                        message = canalServer
-                            .getWithoutAck(clientIdentity, getBatchSize, getTimeout, TimeUnit.MILLISECONDS);
+                        message = canalServer.getWithoutAck(clientIdentity,
+                            getBatchSize,
+                            getTimeout,
+                            TimeUnit.MILLISECONDS);
                     } else {
                         message = canalServer.getWithoutAck(clientIdentity, getBatchSize);
                     }

+ 1 - 1
sink/src/main/java/com/alibaba/otter/canal/sink/entry/group/GroupEventSink.java

@@ -51,7 +51,7 @@ public class GroupEventSink extends EntryEventSink {
             try {
                 barrier.await(event);// 进行timeline的归并调度处理
                 if (filterTransactionEntry) {
-                    return super.doSink(Arrays.asList(event));
+                    super.doSink(Arrays.asList(event));
                 } else if (i == size - 1) {
                     // 针对事务数据,只有到最后一条数据都通过后,才进行sink操作,保证原子性
                     // 同时批量sink,也要保证在最后一条数据释放状态之前写出数据,否则就有并发问题