Merge pull request #1607 from rewerma/master

ES sync: add support for parent-child document indexes
agapple 6 years ago
parent
commit
3b618740d4
45 changed files with 1586 additions and 1241 deletions
  1. + 1 - 1 client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/YmlConfigBinder.java
  2. + 1 - 1 client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedConversionService.java
  3. + 1 - 0 client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/YamlProcessor.java
  4. + 29 - 23 client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/ExtensionLoader.java
  5. + 2 - 0 client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/JdbcTypeUtil.java
  6. + 4 - 4 client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Result.java
  7. + 52 - 53 client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Util.java
  8. + 6 - 0 client-adapter/elasticsearch/pom.xml
  9. + 2 - 2 client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/ESAdapter.java
  10. + 45 - 15 client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/ESSyncConfig.java
  11. + 3 - 2 client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/SchemaItem.java
  12. + 18 - 8 client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/monitor/ESConfigMonitor.java
  13. + 63 - 35 client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/service/ESEtlService.java
  14. + 17 - 16 client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/service/ESSyncService.java
  15. + 7 - 49 client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESSyncUtil.java
  16. + 111 - 15 client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESTemplate.java
  17. + 21 - 0 client-adapter/elasticsearch/src/main/resources/es/biz_order.yml
  18. + 47 - 0 client-adapter/elasticsearch/src/main/resources/es/customer.yml
  19. + 1 - 0 client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/ConfigLoadTest.java
  20. + 118 - 0 client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/ESTest.java
  21. + 2 - 2 client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/TestConstant.java
  22. + 1 - 1 client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/Common.java
  23. + 1 - 1 client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/HbaseAdapter.java
  24. + 16 - 4 client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/monitor/HbaseConfigMonitor.java
  25. + 391 - 397 client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseEtlService.java
  26. + 3 - 3 client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseSyncService.java
  27. + 1 - 3 client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/PhTypeUtil.java
  28. + 1 - 1 client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/common/EtlLock.java
  29. + 4 - 4 client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/common/SyncSwitch.java
  30. + 2 - 2 client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/AbstractCanalAdapterWorker.java
  31. + 7 - 5 client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterKafkaWorker.java
  32. + 15 - 23 client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterLoader.java
  33. + 8 - 5 client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterRocketMQWorker.java
  34. + 1 - 1 client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterWorker.java
  35. + 4 - 2 client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/ApplicationConfigMonitor.java
  36. + 8 - 3 client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/remote/DbRemoteConfigLoader.java
  37. + 9 - 4 client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/remote/RemoteAdapterMonitorImpl.java
  38. + 17 - 17 client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/rest/CommonRest.java
  39. + 1 - 1 client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/RdbAdapter.java
  40. + 11 - 11 client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/MappingConfig.java
  41. + 16 - 4 client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/monitor/RdbConfigMonitor.java
  42. + 7 - 11 client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbEtlService.java
  43. + 507 - 507 client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java
  44. + 3 - 4 client-adapter/rdb/src/test/java/com/alibaba/otter/canal/client/adapter/rdb/test/DBTest.java
  45. + 1 - 1 client-adapter/rdb/src/test/java/com/alibaba/otter/canal/client/adapter/rdb/test/TestConstant.java

+ 1 - 1
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/YmlConfigBinder.java

@@ -84,7 +84,7 @@ public class YmlConfigBinder {
         try {
             byte[] contentBytes;
             if (charset == null) {
-                contentBytes = content.getBytes();
+                contentBytes = content.getBytes("UTF-8");
             } else {
                 contentBytes = content.getBytes(charset);
             }
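
This hunk pins the fallback encoding to UTF-8 instead of the platform default charset, so config bytes decode the same way on every host. A minimal sketch of the same idea using java.nio.charset.StandardCharsets, which avoids the checked UnsupportedEncodingException that the String-based overload declares (whether YmlConfigBinder can adopt it depends on code outside this hunk):

    import java.nio.charset.StandardCharsets;

    public class Utf8Sketch {
        public static void main(String[] args) {
            String content = "esMapping: 父子文档";                  // non-ASCII yml content
            byte[] bytes = content.getBytes(StandardCharsets.UTF_8); // no checked exception to handle
            System.out.println(new String(bytes, StandardCharsets.UTF_8).equals(content)); // true
        }
    }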

+ 1 - 1
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedConversionService.java

@@ -89,7 +89,7 @@ class RelaxedConversionService implements ConversionService {
             return new RelaxedConversionService.StringToEnumIgnoringCaseConverterFactory.StringToEnum(enumType);
         }
 
-        private class StringToEnum<T extends Enum> implements Converter<String, T> {
+        private static class StringToEnum<T extends Enum> implements Converter<String, T> {
 
             private final Class<T> enumType;
 

+ 1 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/YamlProcessor.java

@@ -191,6 +191,7 @@ public abstract class YamlProcessor {
     }
 
     private boolean process(Map<String, Object> map, MatchCallback callback) {
+        @SuppressWarnings("serial")
         Properties properties = new Properties() {
 
             @Override

+ 29 - 23
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/ExtensionLoader.java

@@ -6,6 +6,7 @@ import java.io.InputStreamReader;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.net.URLClassLoader;
+import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.Arrays;
 import java.util.Enumeration;
@@ -27,7 +28,8 @@ import org.slf4j.LoggerFactory;
  */
 public class ExtensionLoader<T> {
 
-    private static final Logger                                      logger                     = LoggerFactory.getLogger(ExtensionLoader.class);
+    private static final Logger                                      logger                     = LoggerFactory
+        .getLogger(ExtensionLoader.class);
 
     private static final String                                      SERVICES_DIRECTORY         = "META-INF/services/";
 
@@ -35,7 +37,8 @@ public class ExtensionLoader<T> {
 
     private static final String                                      DEFAULT_CLASSLOADER_POLICY = "internal";
 
-    private static final Pattern                                     NAME_SEPARATOR             = Pattern.compile("\\s*[,]+\\s*");
+    private static final Pattern                                     NAME_SEPARATOR             = Pattern
+        .compile("\\s*[,]+\\s*");
 
     private static final ConcurrentMap<Class<?>, ExtensionLoader<?>> EXTENSION_LOADERS          = new ConcurrentHashMap<>();
 
@@ -171,7 +174,8 @@ public class ExtensionLoader<T> {
             return instance;
         } catch (Throwable t) {
             throw new IllegalStateException("Extension instance(name: " + name + ", class: " + type
-                                            + ")  could not be instantiated: " + t.getMessage(), t);
+                                            + ")  could not be instantiated: " + t.getMessage(),
+                t);
         }
     }
 
@@ -191,7 +195,8 @@ public class ExtensionLoader<T> {
             return instance;
         } catch (Throwable t) {
             throw new IllegalStateException("Extension instance(name: " + name + ", class: " + type
-                                            + ")  could not be instantiated: " + t.getMessage(), t);
+                                            + ")  could not be instantiated: " + t.getMessage(),
+                t);
         }
     }
 
@@ -233,7 +238,11 @@ public class ExtensionLoader<T> {
                                          // button.
             jarPath = jarPath.replaceAll("/classes/.*", "/classes/");
         }
-        return Paths.get(jarPath).getParent().toString(); // Paths - from java 8
+        Path path = Paths.get(jarPath).getParent(); // Paths - from java 8
+        if (path != null) {
+            return path.toString();
+        }
+        return null;
     }
 
     private Map<String, Class<?>> loadExtensionClasses() {
@@ -330,12 +339,10 @@ public class ExtensionLoader<T> {
                                             // Class.forName(line, true,
                                             // classLoader);
                                             if (!type.isAssignableFrom(clazz)) {
-                                                throw new IllegalStateException("Error when load extension class(interface: "
-                                                                                + type
-                                                                                + ", class line: "
-                                                                                + clazz.getName()
-                                                                                + "), class "
-                                                                                + clazz.getName()
+                                                throw new IllegalStateException(
+                                                    "Error when load extension class(interface: " + type
+                                                                                + ", class line: " + clazz.getName()
+                                                                                + "), class " + clazz.getName()
                                                                                 + "is not subtype of interface.");
                                             } else {
                                                 try {
@@ -353,9 +360,9 @@ public class ExtensionLoader<T> {
                                                                 extensionClasses.put(n, clazz);
                                                             } else if (c != clazz) {
                                                                 cachedNames.remove(clazz);
-                                                                throw new IllegalStateException("Duplicate extension "
-                                                                                                + type.getName()
-                                                                                                + " name " + n + " on "
+                                                                throw new IllegalStateException(
+                                                                    "Duplicate extension " + type.getName() + " name "
+                                                                                                + n + " on "
                                                                                                 + c.getName() + " and "
                                                                                                 + clazz.getName());
                                                             }
@@ -365,12 +372,9 @@ public class ExtensionLoader<T> {
                                             }
                                         }
                                     } catch (Throwable t) {
-                                        IllegalStateException e = new IllegalStateException("Failed to load extension class(interface: "
-                                                                                            + type
-                                                                                            + ", class line: "
-                                                                                            + line
-                                                                                            + ") in "
-                                                                                            + url
+                                        IllegalStateException e = new IllegalStateException(
+                                            "Failed to load extension class(interface: " + type + ", class line: "
+                                                                                            + line + ") in " + url
                                                                                             + ", cause: "
                                                                                             + t.getMessage(),
                                             t);
@@ -385,13 +389,15 @@ public class ExtensionLoader<T> {
                         }
                     } catch (Throwable t) {
                         logger.error("Exception when load extension class(interface: " + type + ", class file: " + url
-                                     + ") in " + url, t);
+                                     + ") in " + url,
+                            t);
                     }
                 } // end of while urls
             }
         } catch (Throwable t) {
-            logger.error("Exception when load extension class(interface: " + type + ", description file: " + fileName
-                         + ").", t);
+            logger.error(
+                "Exception when load extension class(interface: " + type + ", description file: " + fileName + ").",
+                t);
         }
     }
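
The Paths.get(jarPath).getParent() hunk above replaces an unconditional .getParent().toString() with a null check: Path.getParent() returns null for a path with no directory component, which previously produced a NullPointerException. A minimal demonstration:

    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class ParentPathSketch {
        public static void main(String[] args) {
            Path bare = Paths.get("plugin.jar");          // no parent directory
            System.out.println(bare.getParent());         // null -> calling toString() here would NPE
            Path nested = Paths.get("/opt/canal/plugin/plugin.jar");
            System.out.println(nested.getParent());       // /opt/canal/plugin
        }
    }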
 

+ 2 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/JdbcTypeUtil.java

@@ -155,9 +155,11 @@ public class JdbcTypeUtil {
                     } else {
                         res = null;
                     }
+                    break;
                 case Types.CLOB:
                 default:
                     res = value;
+                    break;
             }
             return res;
         } catch (Exception e) {
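
The two added break statements close a switch fall-through: without them, control fell from the preceding case into the default branch, and res = value silently overwrote the result just computed. A self-contained sketch of the failure mode (the DATE case and its null result are illustrative, not the exact cases from JdbcTypeUtil):

    import java.sql.Types;

    public class FallThroughSketch {
        static Object convert(int sqlType, Object value) {
            Object res;
            switch (sqlType) {
                case Types.DATE:
                    res = null;   // without the break below, execution falls into default...
                    break;        // ...and res is overwritten with the raw value
                case Types.CLOB:
                default:
                    res = value;
                    break;
            }
            return res;
        }

        public static void main(String[] args) {
            System.out.println(convert(Types.DATE, "2019-01-01")); // null, not "2019-01-01"
        }
    }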

+ 4 - 4
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Result.java

@@ -12,10 +12,10 @@ import java.util.Date;
 public class Result implements Serializable {
 
     private static final long serialVersionUID = -3276409502352405716L;
-    public Integer code = 20000;
-    public Object  data;
-    public String  message;
-    public Date    sysTime;
+    private Integer           code             = 20000;
+    private Object            data;
+    private String            message;
+    private Date              sysTime;
 
     public static Result createSuccess(String message) {
         Result result = new Result();

+ 52 - 53
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Util.java

@@ -2,11 +2,17 @@ package com.alibaba.otter.canal.client.adapter.support;
 
 import java.io.File;
 import java.net.URL;
-import java.sql.*;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
 import java.time.*;
 import java.time.format.DateTimeFormatter;
 import java.util.Date;
 import java.util.TimeZone;
+import java.util.concurrent.SynchronousQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
 import java.util.function.Consumer;
 import java.util.function.Function;
 
@@ -29,38 +35,16 @@ public class Util {
     /**
     * Execute SQL through the given DataSource
      */
-    public static Object sqlRS(DataSource ds, String sql, Function<ResultSet, Object> fun) throws SQLException {
-        Connection conn = null;
-        Statement stmt = null;
-        ResultSet rs = null;
-        try {
-            conn = ds.getConnection();
-            stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    public static Object sqlRS(DataSource ds, String sql, Function<ResultSet, Object> fun) {
+        try (Connection conn = ds.getConnection();
+                Statement stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);) {
             stmt.setFetchSize(Integer.MIN_VALUE);
-            rs = stmt.executeQuery(sql);
-            return fun.apply(rs);
-        } finally {
-            if (rs != null) {
-                try {
-                    rs.close();
-                } catch (SQLException e) {
-                    logger.error(e.getMessage(), e);
-                }
-            }
-            if (stmt != null) {
-                try {
-                    stmt.close();
-                } catch (SQLException e) {
-                    logger.error(e.getMessage(), e);
-                }
-            }
-            if (conn != null) {
-                try {
-                    conn.close();
-                } catch (SQLException e) {
-                    logger.error(e.getMessage(), e);
-                }
+            try (ResultSet rs = stmt.executeQuery(sql);) {
+                return fun.apply(rs);
             }
+        } catch (Exception e) {
+            logger.error("sqlRs has error, sql: {} ", sql);
+            throw new RuntimeException(e);
         }
     }
 
@@ -72,29 +56,10 @@ public class Util {
     * @param consumer callback invoked with the ResultSet
      */
     public static void sqlRS(Connection conn, String sql, Consumer<ResultSet> consumer) {
-        Statement stmt = null;
-        ResultSet rs = null;
-        try {
-            stmt = conn.createStatement();
-            rs = stmt.executeQuery(sql);
+        try (Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery(sql)) {
             consumer.accept(rs);
         } catch (SQLException e) {
             logger.error(e.getMessage(), e);
-        } finally {
-            if (rs != null) {
-                try {
-                    rs.close();
-                } catch (SQLException e) {
-                    logger.error(e.getMessage(), e);
-                }
-            }
-            if (stmt != null) {
-                try {
-                    stmt.close();
-                } catch (SQLException e) {
-                    logger.error(e.getMessage(), e);
-                }
-            }
         }
     }
 
@@ -144,7 +109,41 @@ public class Util {
         return column;
     }
 
-    public static String timeZone; // current time zone
+    public static ThreadPoolExecutor newFixedThreadPool(int nThreads, long keepAliveTime) {
+        return new ThreadPoolExecutor(nThreads,
+            nThreads,
+            keepAliveTime,
+            TimeUnit.MILLISECONDS,
+            new SynchronousQueue<>(),
+            (r, exe) -> {
+                if (!exe.isShutdown()) {
+                    try {
+                        exe.getQueue().put(r);
+                    } catch (InterruptedException e) {
+                        // ignore
+                    }
+                }
+            });
+    }
+
+    public static ThreadPoolExecutor newSingleThreadExecutor(long keepAliveTime) {
+        return new ThreadPoolExecutor(1,
+            1,
+            keepAliveTime,
+            TimeUnit.MILLISECONDS,
+            new SynchronousQueue<>(),
+            (r, exe) -> {
+                if (!exe.isShutdown()) {
+                    try {
+                        exe.getQueue().put(r);
+                    } catch (InterruptedException e) {
+                        // ignore
+                    }
+                }
+            });
+    }
+
+    public final static String  timeZone;    // current time zone
     private static DateTimeZone dateTimeZone;
 
     static {
@@ -265,7 +264,7 @@ public class Util {
                 LocalDateTime localDateTime = LocalDateTime.of(localDate, localTime);
                 return Date.from(localDateTime.atZone(ZoneId.systemDefault()).toInstant());
             }
-        } catch (Exception e) {
+        } catch (Throwable e) {
             logger.error(e.getMessage(), e);
         }
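
The new newFixedThreadPool / newSingleThreadExecutor helpers added to Util above combine a zero-capacity SynchronousQueue with a rejection handler that calls getQueue().put(r), turning rejection into back-pressure: when all workers are busy, the submitting thread blocks until a worker is free, rather than the task being queued without bound or dropped. A runnable sketch of that behavior (pool size, sleep times and task count are illustrative; unlike the patch, the sketch restores the interrupt flag instead of ignoring it):

    import java.util.concurrent.SynchronousQueue;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    public class CallerBlocksSketch {
        public static void main(String[] args) throws InterruptedException {
            ThreadPoolExecutor pool = new ThreadPoolExecutor(2, 2, 5000L, TimeUnit.MILLISECONDS,
                new SynchronousQueue<>(),
                (r, exe) -> {
                    if (!exe.isShutdown()) {
                        try {
                            exe.getQueue().put(r); // blocks the caller until a worker takes the task
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                        }
                    }
                });
            for (int i = 0; i < 6; i++) {          // 6 tasks on 2 workers: execute() blocks, never rejects
                final int n = i;
                pool.execute(() -> {
                    try { Thread.sleep(200L); } catch (InterruptedException ignored) { }
                    System.out.println("task " + n + " done on " + Thread.currentThread().getName());
                });
            }
            pool.shutdown();
            pool.awaitTermination(10, TimeUnit.SECONDS);
        }
    }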
 

+ 6 - 0
client-adapter/elasticsearch/pom.xml

@@ -40,6 +40,12 @@
             <version>4.12</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>5.1.40</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
     <build>

+ 2 - 2
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/ESAdapter.java

@@ -133,8 +133,8 @@ public class ESAdapter implements OuterAdapter {
             esSyncService = new ESSyncService(esTemplate);
 
             esConfigMonitor = new ESConfigMonitor();
-            esConfigMonitor.init(this);
-        } catch (Exception e) {
+            esConfigMonitor.init(this, envProperties);
+        } catch (Throwable e) {
             throw new RuntimeException(e);
         }
     }

+ 45 - 15
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/ESSyncConfig.java

@@ -31,7 +31,7 @@ public class ESSyncConfig {
             throw new NullPointerException("esMapping._type");
         }
         if (esMapping._id == null && esMapping.getPk() == null) {
-            throw new NullPointerException("esMapping._id and esMapping.pk");
+            throw new NullPointerException("esMapping._id or esMapping.pk");
         }
         if (esMapping.sql == null) {
             throw new NullPointerException("esMapping.sql");
@@ -80,23 +80,23 @@ public class ESSyncConfig {
 
     public static class ESMapping {
 
-        private String              _index;
-        private String              _type;
-        private String              _id;
-        private boolean             upsert          = false;
-        private String              pk;
-        // private String parent;
-        private String              sql;
+        private String                       _index;
+        private String                       _type;
+        private String                       _id;
+        private boolean                      upsert          = false;
+        private String                       pk;
+        private Map<String, RelationMapping> relations       = new LinkedHashMap<>();
+        private String                       sql;
        // object fields, e.g.: objFields:
         // - _labels: array:;
-        private Map<String, String> objFields       = new LinkedHashMap<>();
-        private List<String>        skips           = new ArrayList<>();
-        private int                 commitBatch     = 1000;
-        private String              etlCondition;
-        private boolean             syncByTimestamp = false;                // whether to sync periodically by timestamp
-        private Long                syncInterval;                           // sync interval
+        private Map<String, String>          objFields       = new LinkedHashMap<>();
+        private List<String>                 skips           = new ArrayList<>();
+        private int                          commitBatch     = 1000;
+        private String                       etlCondition;
+        private boolean                      syncByTimestamp = false;                // whether to sync periodically by timestamp
+        private Long                         syncInterval;                           // sync interval
 
-        private SchemaItem          schemaItem;                             // parsed SQL schema model
+        private SchemaItem                   schemaItem;                             // parsed SQL schema model
 
         public String get_index() {
             return _index;
@@ -154,6 +154,14 @@ public class ESSyncConfig {
             this.skips = skips;
         }
 
+        public Map<String, RelationMapping> getRelations() {
+            return relations;
+        }
+
+        public void setRelations(Map<String, RelationMapping> relations) {
+            this.relations = relations;
+        }
+
         public String getSql() {
             return sql;
         }
@@ -202,4 +210,26 @@ public class ESSyncConfig {
             this.schemaItem = schemaItem;
         }
     }
+
+    public static class RelationMapping {
+
+        private String name;
+        private String parent;
+
+        public String getName() {
+            return name;
+        }
+
+        public void setName(String name) {
+            this.name = name;
+        }
+
+        public String getParent() {
+            return parent;
+        }
+
+        public void setParent(String parent) {
+            this.parent = parent;
+        }
+    }
 }
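
The new relations map is what drives Elasticsearch parent-child (join field) documents: each key names a join field in the index, and its RelationMapping carries name (the relation value to store) plus, for child documents, parent (the SQL select field whose value becomes the parent id and routing key). A hypothetical sketch of a child-side mapping assembled in code; real deployments declare this in the yml files such as es/biz_order.yml, and all names below are invented:

    import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
    import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.RelationMapping;

    public class RelationMappingSketch {
        public static void main(String[] args) {
            ESMapping mapping = new ESMapping();

            RelationMapping child = new RelationMapping();
            child.setName("biz_order");                 // relation value written into the join field
            child.setParent("customer_id");             // select field supplying the parent id / routing

            // "customer_order_relation" is a hypothetical join field name
            mapping.getRelations().put("customer_order_relation", child);

            System.out.println(mapping.getRelations().get("customer_order_relation").getParent());
        }
    }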

+ 3 - 2
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/SchemaItem.java

@@ -278,12 +278,13 @@ public class SchemaItem {
             if (relationSelectFieldItems == null) {
                 synchronized (SchemaItem.class) {
                     if (relationSelectFieldItems == null) {
-                        relationSelectFieldItems = new ArrayList<>();
+                        List<FieldItem> relationSelectFieldItemsTmp = new ArrayList<>();
                         for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
                             if (fieldItem.getOwners().contains(getAlias())) {
-                                relationSelectFieldItems.add(fieldItem);
+                                relationSelectFieldItemsTmp.add(fieldItem);
                             }
                         }
+                        relationSelectFieldItems = relationSelectFieldItemsTmp;
                     }
                 }
             }
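
This hunk is a safe-publication fix for double-checked locking: the list is now built completely in a local variable and published to the shared field with one final assignment, so a racing reader can no longer observe a half-filled list. (Textbook-correct DCL additionally requires the field to be volatile, which this hunk neither shows nor changes.) The generic shape as a standalone sketch:

    import java.util.ArrayList;
    import java.util.List;

    public class SafePublishSketch {
        private volatile List<String> cache; // volatile completes the double-checked-locking idiom

        public List<String> get() {
            List<String> local = cache;
            if (local == null) {
                synchronized (this) {
                    local = cache;
                    if (local == null) {
                        List<String> tmp = new ArrayList<>(); // build fully before publishing
                        tmp.add("item");
                        cache = local = tmp;                  // single write publishes a complete list
                    }
                }
            }
            return local;
        }

        public static void main(String[] args) {
            System.out.println(new SafePublishSketch().get());
        }
    }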

+ 18 - 8
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/monitor/ESConfigMonitor.java

@@ -3,6 +3,7 @@ package com.alibaba.otter.canal.client.adapter.es.monitor;
 import java.io.File;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Properties;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -12,9 +13,9 @@ import org.apache.commons.io.monitor.FileAlterationMonitor;
 import org.apache.commons.io.monitor.FileAlterationObserver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
 
 import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder;
 import com.alibaba.otter.canal.client.adapter.es.ESAdapter;
 import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
 import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem;
@@ -31,10 +32,13 @@ public class ESConfigMonitor {
 
     private ESAdapter             esAdapter;
 
+    private Properties            envProperties;
+
     private FileAlterationMonitor fileMonitor;
 
-    public void init(ESAdapter esAdapter) {
+    public void init(ESAdapter esAdapter, Properties envProperties) {
         this.esAdapter = esAdapter;
+        this.envProperties = envProperties;
         File confDir = Util.getConfDirPath(adapterName);
         try {
             FileAlterationObserver observer = new FileAlterationObserver(confDir,
@@ -65,11 +69,13 @@ public class ESConfigMonitor {
             try {
                // load the newly added config file
                 String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName());
-                ESSyncConfig config = new Yaml().loadAs(configContent, ESSyncConfig.class);
-                config.validate();
-                addConfigToCache(file, config);
-
-                logger.info("Add a new es mapping config: {} to canal adapter", file.getName());
+                ESSyncConfig config = YmlConfigBinder
+                    .bindYmlToObj(null, configContent, ESSyncConfig.class, null, envProperties);
+                if (config != null) {
+                    config.validate();
+                    addConfigToCache(file, config);
+                    logger.info("Add a new es mapping config: {} to canal adapter", file.getName());
+                }
             } catch (Exception e) {
                 logger.error(e.getMessage(), e);
             }
@@ -88,7 +94,11 @@ public class ESConfigMonitor {
                         onFileDelete(file);
                         return;
                     }
-                    ESSyncConfig config = new Yaml().loadAs(configContent, ESSyncConfig.class);
+                    ESSyncConfig config = YmlConfigBinder
+                        .bindYmlToObj(null, configContent, ESSyncConfig.class, null, envProperties);
+                    if (config == null) {
+                        return;
+                    }
                     config.validate();
                     if (esAdapter.getEsSyncConfig().containsKey(file.getName())) {
                         deleteConfigFromCache(file);

+ 63 - 35
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/service/ESEtlService.java

@@ -1,25 +1,23 @@
 package com.alibaba.otter.canal.client.adapter.es.service;
 
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
+import java.sql.SQLException;
+import java.util.*;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import javax.sql.DataSource;
 
+import org.apache.commons.lang.StringUtils;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.SearchType;
+import org.elasticsearch.action.update.UpdateRequestBuilder;
 import org.elasticsearch.client.transport.TransportClient;
-import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
@@ -30,10 +28,10 @@ import com.alibaba.druid.pool.DruidDataSource;
 import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
 import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
 import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.FieldItem;
-import com.alibaba.otter.canal.client.adapter.es.support.ESSyncUtil;
 import com.alibaba.otter.canal.client.adapter.es.support.ESTemplate;
 import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
 import com.alibaba.otter.canal.client.adapter.support.EtlResult;
+import com.alibaba.otter.canal.client.adapter.support.Util;
 import com.google.common.base.Joiner;
 
 /**
@@ -100,7 +98,7 @@ public class ESEtlService {
 
             // fetch the total row count
             String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
-            long cnt = (Long) ESSyncUtil.sqlRS(dataSource, countSql, rs -> {
+            long cnt = (Long) Util.sqlRS(dataSource, countSql, rs -> {
                 Long count = null;
                 try {
                     if (rs.next()) {
@@ -116,8 +114,7 @@ public class ESEtlService {
             if (cnt >= 10000) {
                int threadCount = 3; // read from config; defaults to 3
                 long perThreadCnt = cnt / threadCount;
-                ExecutorService executor = Executors.newFixedThreadPool(threadCount);
-                List<Future<Boolean>> futures = new ArrayList<>(threadCount);
+                ExecutorService executor = Util.newFixedThreadPool(threadCount, 5000L);
                 for (int i = 0; i < threadCount; i++) {
                     long offset = i * perThreadCnt;
                     Long size = null;
@@ -130,16 +127,13 @@ public class ESEtlService {
                     } else {
                         sqlFinal = sql + " LIMIT " + offset + "," + cnt;
                     }
-                    Future<Boolean> future = executor
-                        .submit(() -> executeSqlImport(dataSource, sqlFinal, mapping, impCount, errMsg));
-                    futures.add(future);
-                }
-
-                for (Future<Boolean> future : futures) {
-                    future.get();
+                    executor.execute(() -> executeSqlImport(dataSource, sqlFinal, mapping, impCount, errMsg));
                 }
 
                 executor.shutdown();
+                while (!executor.awaitTermination(3, TimeUnit.SECONDS)) {
+                    // ignore
+                }
             } else {
                 executeSqlImport(dataSource, sql, mapping, impCount, errMsg);
             }
@@ -176,7 +170,7 @@ public class ESEtlService {
     private boolean executeSqlImport(DataSource ds, String sql, ESMapping mapping, AtomicLong impCount,
                                      List<String> errMsg) {
         try {
-            ESSyncUtil.sqlRS(ds, sql, rs -> {
+            Util.sqlRS(ds, sql, rs -> {
                 int count = 0;
                 try {
                     BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
@@ -184,39 +178,73 @@ public class ESEtlService {
                     long batchBegin = System.currentTimeMillis();
                     while (rs.next()) {
                         Map<String, Object> esFieldData = new LinkedHashMap<>();
+                        Object idVal = null;
                         for (FieldItem fieldItem : mapping.getSchemaItem().getSelectFields().values()) {
 
-                            // the primary key field is not written into the document body
-                            if (fieldItem.getFieldName().equals(mapping.get_id())) {
-                                continue;
-                            }
-
                             String fieldName = fieldItem.getFieldName();
                             if (mapping.getSkips().contains(fieldName)) {
                                 continue;
                             }
 
-                            Object val = esTemplate.getValFromRS(mapping, rs, fieldName, fieldName);
-                            esFieldData.put(fieldName, val);
+                            // the primary key field is not written into the document body
+                            if (fieldItem.getFieldName().equals(mapping.get_id())) {
+                                idVal = esTemplate.getValFromRS(mapping, rs, fieldName, fieldName);
+                            } else {
+                                Object val = esTemplate.getValFromRS(mapping, rs, fieldName, fieldName);
+                                esFieldData.put(Util.cleanColumn(fieldName), val);
+                            }
+
                         }
-                        Object idVal = null;
-                        if (mapping.get_id() != null) {
-                            idVal = rs.getObject(mapping.get_id());
+
+                        if (!mapping.getRelations().isEmpty()) {
+                            mapping.getRelations().forEach((relationField, relationMapping) -> {
+                                Map<String, Object> relations = new HashMap<>();
+                                relations.put("name", relationMapping.getName());
+                                if (StringUtils.isNotEmpty(relationMapping.getParent())) {
+                                    FieldItem parentFieldItem = mapping.getSchemaItem()
+                                        .getSelectFields()
+                                        .get(relationMapping.getParent());
+                                    Object parentVal;
+                                    try {
+                                        parentVal = esTemplate.getValFromRS(mapping,
+                                            rs,
+                                            parentFieldItem.getFieldName(),
+                                            parentFieldItem.getFieldName());
+                                    } catch (SQLException e) {
+                                        throw new RuntimeException(e);
+                                    }
+                                    if (parentVal != null) {
+                                        relations.put("parent", parentVal.toString());
+                                        esFieldData.put("$parent_routing", parentVal.toString());
+
+                                    }
+                                }
+                                esFieldData.put(Util.cleanColumn(relationField), relations);
+                            });
                         }
 
                         if (idVal != null) {
+                            String parentVal = (String) esFieldData.remove("$parent_routing");
                             if (mapping.isUpsert()) {
-                                bulkRequestBuilder.add(transportClient
+                                UpdateRequestBuilder updateRequestBuilder = transportClient
                                     .prepareUpdate(mapping.get_index(), mapping.get_type(), idVal.toString())
                                     .setDoc(esFieldData)
-                                    .setDocAsUpsert(true));
+                                    .setDocAsUpsert(true);
+                                if (StringUtils.isNotEmpty(parentVal)) {
+                                    updateRequestBuilder.setRouting(parentVal);
+                                }
+                                bulkRequestBuilder.add(updateRequestBuilder);
                             } else {
-                                bulkRequestBuilder.add(transportClient
+                                IndexRequestBuilder indexRequestBuilder = transportClient
                                     .prepareIndex(mapping.get_index(), mapping.get_type(), idVal.toString())
-                                    .setSource(esFieldData));
+                                    .setSource(esFieldData);
+                                if (StringUtils.isNotEmpty(parentVal)) {
+                                    indexRequestBuilder.setRouting(parentVal);
+                                }
+                                bulkRequestBuilder.add(indexRequestBuilder);
                             }
                         } else {
-                            idVal = rs.getObject(mapping.getPk());
+                            idVal = esFieldData.get(mapping.getPk());
                             SearchResponse response = transportClient.prepareSearch(mapping.get_index())
                                 .setTypes(mapping.get_type())
                                 .setQuery(QueryBuilders.termQuery(mapping.getPk(), idVal))
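
The ETL loop now emits a join field per document and, for children, routes the index/update request by the parent id: Elasticsearch stores parent and child documents on the same shard, so the parent value must double as the routing key. The temporary "$parent_routing" entry is only a carrier; it is removed from the document body again and turned into setRouting(...) on the request builder. A sketch of the resulting document shape, with invented field names and ids:

    import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class JoinFieldShapeSketch {
        public static void main(String[] args) {
            Map<String, Object> esFieldData = new LinkedHashMap<>();
            esFieldData.put("order_no", "SO-42");                  // ordinary mapped column (hypothetical)

            Map<String, Object> relations = new HashMap<>();
            relations.put("name", "biz_order");                    // child relation name
            relations.put("parent", "1001");                       // parent _id
            esFieldData.put("customer_order_relation", relations); // hypothetical join field

            esFieldData.put("$parent_routing", "1001");            // transient marker, never indexed
            String routing = (String) esFieldData.remove("$parent_routing");
            System.out.println(esFieldData + "  -> routed with " + routing);
        }
    }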

+ 17 - 16
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/service/ESSyncService.java

@@ -7,6 +7,7 @@ import java.util.Map;
 
 import javax.sql.DataSource;
 
+import com.alibaba.otter.canal.client.adapter.support.Util;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -43,7 +44,7 @@ public class ESSyncService {
         long begin = System.currentTimeMillis();
         if (esSyncConfigs != null) {
             if (logger.isTraceEnabled()) {
-                logger.trace("Destination: {}, database:{}, table:{}, type:{}, effect index count: {}",
+                logger.trace("Destination: {}, database:{}, table:{}, type:{}, affected index count: {}",
                     dml.getDestination(),
                     dml.getDatabase(),
                     dml.getTable(),
@@ -65,7 +66,7 @@ public class ESSyncService {
                 }
             }
             if (logger.isTraceEnabled()) {
-                logger.trace("Sync elapsed time: {} ms, effect index count:{}, destination: {}",
+                logger.trace("Sync elapsed time: {} ms, affected indexes count:{}, destination: {}",
                     (System.currentTimeMillis() - begin),
                     esSyncConfigs.size(),
                     dml.getDestination());
@@ -74,7 +75,7 @@ public class ESSyncService {
                 StringBuilder configIndexes = new StringBuilder();
                 esSyncConfigs
                     .forEach(esSyncConfig -> configIndexes.append(esSyncConfig.getEsMapping().get_index()).append(" "));
-                logger.debug("DML: {} \nEffect indexes: {}",
+                logger.debug("DML: {} \nAffected indexes: {}",
                     JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue),
                     configIndexes.toString());
             }
@@ -166,7 +167,7 @@ public class ESSyncService {
                                     data,
                                     fieldItem.getFieldName(),
                                     fieldItem.getColumn().getColumnName());
-                                esFieldData.put(fieldItem.getFieldName(), value);
+                                esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), value);
                             }
 
                             joinTableSimpleFieldOperation(config, dml, data, tableItem, esFieldData);
@@ -295,7 +296,7 @@ public class ESSyncService {
                                         data,
                                         fieldItem.getFieldName(),
                                         fieldItem.getColumn().getColumnName());
-                                    esFieldData.put(fieldItem.getFieldName(), value);
+                                    esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), value);
                                 }
                             }
                             joinTableSimpleFieldOperation(config, dml, data, tableItem, esFieldData);
@@ -407,7 +408,7 @@ public class ESSyncService {
                        // ------ set the joined table's simple fields to null ------
                         Map<String, Object> esFieldData = new LinkedHashMap<>();
                         for (FieldItem fieldItem : tableItem.getRelationSelectFieldItems()) {
-                            esFieldData.put(fieldItem.getFieldName(), null);
+                            esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), null);
                         }
                         joinTableSimpleFieldOperation(config, dml, data, tableItem, esFieldData);
                     } else {
@@ -464,7 +465,7 @@ public class ESSyncService {
                 mapping.get_index(),
                 sql.replace("\n", " "));
         }
-        ESSyncUtil.sqlRS(ds, sql, rs -> {
+        Util.sqlRS(ds, sql, rs -> {
             try {
                 while (rs.next()) {
                     Map<String, Object> esFieldData = new LinkedHashMap<>();
@@ -500,7 +501,7 @@ public class ESSyncService {
                 mapping.get_index(),
                 sql.replace("\n", " "));
         }
-        ESSyncUtil.sqlRS(ds, sql, rs -> {
+        Util.sqlRS(ds, sql, rs -> {
             try {
                 Map<String, Object> esFieldData = null;
                 if (mapping.getPk() != null) {
@@ -508,7 +509,7 @@ public class ESSyncService {
                     esTemplate.getESDataFromDmlData(mapping, data, esFieldData);
                     esFieldData.remove(mapping.getPk());
                     for (String key : esFieldData.keySet()) {
-                        esFieldData.put(key, null);
+                        esFieldData.put(Util.cleanColumn(key), null);
                     }
                 }
                 while (rs.next()) {
@@ -601,7 +602,7 @@ public class ESSyncService {
                 mapping.get_index(),
                 sql.toString().replace("\n", " "));
         }
-        ESSyncUtil.sqlRS(ds, sql.toString(), rs -> {
+        Util.sqlRS(ds, sql.toString(), rs -> {
             try {
                 while (rs.next()) {
                     Map<String, Object> esFieldData = new LinkedHashMap<>();
@@ -617,7 +618,7 @@ public class ESSyncService {
                                                     rs,
                                                     fieldItem.getFieldName(),
                                                     fieldItem.getColumn().getColumnName());
-                                                esFieldData.put(fieldItem.getFieldName(), val);
+                                                esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), val);
                                                 break out;
                                             }
                                         }
@@ -628,7 +629,7 @@ public class ESSyncService {
                                 rs,
                                 fieldItem.getFieldName(),
                                 fieldItem.getColumn().getColumnName());
-                            esFieldData.put(fieldItem.getFieldName(), val);
+                            esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), val);
                         }
                     }
 
@@ -693,7 +694,7 @@ public class ESSyncService {
                 mapping.get_index(),
                 sql.toString().replace("\n", " "));
         }
-        ESSyncUtil.sqlRS(ds, sql.toString(), rs -> {
+        Util.sqlRS(ds, sql.toString(), rs -> {
             try {
                 while (rs.next()) {
                     Map<String, Object> esFieldData = new LinkedHashMap<>();
@@ -724,7 +725,7 @@ public class ESSyncService {
                                                 rs,
                                                 fieldItem.getFieldName(),
                                                 fieldItem.getFieldName());
-                                            esFieldData.put(fieldItem.getFieldName(), val);
+                                            esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), val);
                                             break;
                                         }
                                     }
@@ -733,7 +734,7 @@ public class ESSyncService {
                         } else {
                             Object val = esTemplate
                                 .getValFromRS(mapping, rs, fieldItem.getFieldName(), fieldItem.getFieldName());
-                            esFieldData.put(fieldItem.getFieldName(), val);
+                            esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), val);
                         }
                     }
 
@@ -812,7 +813,7 @@ public class ESSyncService {
                 mapping.get_index(),
                 sql.replace("\n", " "));
         }
-        ESSyncUtil.sqlRS(ds, sql, rs -> {
+        Util.sqlRS(ds, sql, rs -> {
             try {
                 while (rs.next()) {
                     Map<String, Object> esFieldData = new LinkedHashMap<>();
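
Every ES document key in this file now passes through Util.cleanColumn before being written. Its implementation is not part of this diff (only the trailing return column; appears in the Util.java section above); judging from its usage, it normalizes a field name before it becomes a document key. A hypothetical stand-in consistent with that usage, assuming it strips SQL identifier quoting:

    public class CleanColumnSketch {
        // hypothetical: the real Util.cleanColumn is not included in this diff
        static String cleanColumn(String column) {
            if (column != null && column.contains("`")) {
                column = column.replaceAll("`", "");
            }
            return column;
        }

        public static void main(String[] args) {
            System.out.println(cleanColumn("`role_id`")); // role_id
        }
    }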

+ 7 - 49
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESSyncUtil.java

@@ -3,12 +3,9 @@ package com.alibaba.otter.canal.client.adapter.es.support;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.charset.StandardCharsets;
-import java.sql.*;
+import java.sql.Blob;
+import java.sql.SQLException;
 import java.util.*;
-import java.util.Date;
-import java.util.function.Function;
-
-import javax.sql.DataSource;
 
 import org.apache.commons.codec.binary.Base64;
 import org.joda.time.DateTime;
@@ -234,8 +231,11 @@ public class ESSyncUtil {
     private static byte[] blobToBytes(Blob blob) {
         try (InputStream is = blob.getBinaryStream()) {
             byte[] b = new byte[(int) blob.length()];
-            is.read(b);
-            return b;
+            if (is.read(b) != -1) {
+                return b;
+            } else {
+                return new byte[0];
+            }
         } catch (IOException | SQLException e) {
             logger.error(e.getMessage());
             return null;
@@ -297,46 +297,4 @@ public class ESSyncUtil {
             sql.append(owner).append(".").append(columnName).append("=").append(value).append("  AND ");
         }
     }
-
-    /**
-     * Execute a query SQL
-     */
-    public static Object sqlRS(DataSource ds, String sql, Function<ResultSet, Object> fun) {
-        Connection conn = null;
-        Statement smt = null;
-        ResultSet rs = null;
-        try {
-            conn = ds.getConnection();
-            smt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-            smt.setFetchSize(Integer.MIN_VALUE);
-            rs = smt.executeQuery(sql);
-
-            return fun.apply(rs);
-        } catch (SQLException e) {
-            logger.error("sqlRs has error, sql: {} ", sql);
-            throw new RuntimeException(e);
-        } finally {
-            if (rs != null) {
-                try {
-                    rs.close();
-                } catch (SQLException e) {
-                    logger.error("error to close result set");
-                }
-            }
-            if (smt != null) {
-                try {
-                    smt.close();
-                } catch (SQLException e) {
-                    logger.error("error to close statement");
-                }
-            }
-            if (conn != null) {
-                try {
-                    conn.close();
-                } catch (SQLException e) {
-                    logger.error("error to close db connection");
-                }
-            }
-        }
-    }
 }
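
The blobToBytes change above guards against read() returning -1 on an empty stream, but a single InputStream.read(byte[]) call is still allowed to fill only part of the buffer. A stricter variant loops until the buffer is full; a standalone sketch (SerialBlob stands in for a real JDBC Blob):

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.sql.Blob;
    import java.sql.SQLException;
    import javax.sql.rowset.serial.SerialBlob;

    public class BlobReadSketch {
        static byte[] blobToBytes(Blob blob) throws IOException, SQLException {
            try (InputStream is = blob.getBinaryStream()) {
                byte[] b = new byte[(int) blob.length()];
                int off = 0;
                while (off < b.length) {
                    int n = is.read(b, off, b.length - off); // read() may return short counts
                    if (n == -1) {
                        break;                               // premature end of stream
                    }
                    off += n;
                }
                return b;
            }
        }

        public static void main(String[] args) throws Exception {
            Blob blob = new SerialBlob("hello".getBytes(StandardCharsets.UTF_8));
            System.out.println(new String(blobToBytes(blob), StandardCharsets.UTF_8)); // hello
        }
    }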

+ 111 - 15
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESTemplate.java

@@ -2,6 +2,7 @@ package com.alibaba.otter.canal.client.adapter.es.support;
 
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
@@ -9,10 +10,13 @@ import java.util.concurrent.ConcurrentMap;
 
 import javax.sql.DataSource;
 
+import org.apache.commons.lang.StringUtils;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.update.UpdateRequestBuilder;
 import org.elasticsearch.client.transport.TransportClient;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
@@ -29,6 +33,7 @@ import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem;
 import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.ColumnItem;
 import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.FieldItem;
 import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+import com.alibaba.otter.canal.client.adapter.support.Util;
 
 /**
  * ES operation template
@@ -64,13 +69,24 @@ public class ESTemplate {
      */
     public void insert(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
         if (mapping.get_id() != null) {
+            String parentVal = (String) esFieldData.remove("$parent_routing");
             if (mapping.isUpsert()) {
-                getBulk().add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
+                UpdateRequestBuilder updateRequestBuilder = transportClient
+                    .prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
                     .setDoc(esFieldData)
-                    .setDocAsUpsert(true));
+                    .setDocAsUpsert(true);
+                if (StringUtils.isNotEmpty(parentVal)) {
+                    updateRequestBuilder.setRouting(parentVal);
+                }
+                getBulk().add(updateRequestBuilder);
             } else {
-                getBulk().add(transportClient.prepareIndex(mapping.get_index(), mapping.get_type(), pkVal.toString())
-                    .setSource(esFieldData));
+                IndexRequestBuilder indexRequestBuilder = transportClient
+                    .prepareIndex(mapping.get_index(), mapping.get_type(), pkVal.toString())
+                    .setSource(esFieldData);
+                if (StringUtils.isNotEmpty(parentVal)) {
+                    indexRequestBuilder.setRouting(parentVal);
+                }
+                getBulk().add(indexRequestBuilder);
             }
             commitBulk();
         } else {
@@ -96,7 +112,9 @@ public class ESTemplate {
      * @param esFieldData the data map
      */
     public void update(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
-        append4Update(mapping, pkVal, esFieldData);
+        Map<String, Object> esFieldDataTmp = new LinkedHashMap<>(esFieldData.size());
+        esFieldData.forEach((k, v) -> esFieldDataTmp.put(Util.cleanColumn(k), v));
+        append4Update(mapping, pkVal, esFieldDataTmp);
         commitBulk();
     }
 
@@ -122,7 +140,7 @@ public class ESTemplate {
             (fieldName, value) -> sql.append("_v.").append(fieldName).append("=").append(value).append(" AND "));
         int len = sql.length();
         sql.delete(len - 4, len);
-        Integer syncCount = (Integer) ESSyncUtil.sqlRS(ds, sql.toString(), rs -> {
+        Integer syncCount = (Integer) Util.sqlRS(ds, sql.toString(), rs -> {
             int count = 0;
             try {
                 while (rs.next()) {
@@ -137,7 +155,7 @@ public class ESTemplate {
             return count;
         });
         if (logger.isTraceEnabled()) {
-            logger.trace("Update ES by query effect {} records", syncCount);
+            logger.trace("Update ES by query affected {} records", syncCount);
         }
     }
 
@@ -200,13 +218,24 @@ public class ESTemplate {
 
     private void append4Update(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
         if (mapping.get_id() != null) {
+            String parentVal = (String) esFieldData.remove("$parent_routing");
             if (mapping.isUpsert()) {
-                getBulk().add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
+                UpdateRequestBuilder updateRequestBuilder = transportClient
+                    .prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
                     .setDoc(esFieldData)
-                    .setDocAsUpsert(true));
+                    .setDocAsUpsert(true);
+                if (StringUtils.isNotEmpty(parentVal)) {
+                    updateRequestBuilder.setRouting(parentVal);
+                }
+                getBulk().add(updateRequestBuilder);
             } else {
-                getBulk().add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
-                    .setDoc(esFieldData));
+                UpdateRequestBuilder updateRequestBuilder = transportClient
+                    .prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
+                    .setDoc(esFieldData);
+                if (StringUtils.isNotEmpty(parentVal)) {
+                    updateRequestBuilder.setRouting(parentVal);
+                }
+                getBulk().add(updateRequestBuilder);
             }
         } else {
             SearchResponse response = transportClient.prepareSearch(mapping.get_index())
@@ -223,6 +252,8 @@ public class ESTemplate {
 
     public Object getValFromRS(ESMapping mapping, ResultSet resultSet, String fieldName,
                                String columnName) throws SQLException {
+        fieldName = Util.cleanColumn(fieldName);
+        columnName = Util.cleanColumn(columnName);
         String esType = getEsType(mapping, fieldName);
 
         Object value = resultSet.getObject(columnName);
@@ -254,9 +285,13 @@ public class ESTemplate {
 
             if (!fieldItem.getFieldName().equals(mapping.get_id())
                 && !mapping.getSkips().contains(fieldItem.getFieldName())) {
-                esFieldData.put(fieldItem.getFieldName(), value);
+                esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), value);
             }
         }
+
+        // add parent-child document relation info
+        putRelationDataFromRS(mapping, schemaItem, resultSet, esFieldData);
+
         return resultIdVal;
     }
 
@@ -288,12 +323,16 @@ public class ESTemplate {
             for (ColumnItem columnItem : fieldItem.getColumnItems()) {
                 if (dmlOld.containsKey(columnItem.getColumnName())
                     && !mapping.getSkips().contains(fieldItem.getFieldName())) {
-                    esFieldData.put(fieldItem.getFieldName(),
+                    esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()),
                         getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName()));
                     break;
                 }
             }
         }
+
+        // add parent-child document relation info
+        putRelationDataFromRS(mapping, schemaItem, resultSet, esFieldData);
+
         return resultIdVal;
     }
 
@@ -337,9 +376,12 @@ public class ESTemplate {
 
             if (!fieldItem.getFieldName().equals(mapping.get_id())
                 && !mapping.getSkips().contains(fieldItem.getFieldName())) {
-                esFieldData.put(fieldItem.getFieldName(), value);
+                esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), value);
             }
         }
+
+        // add parent-child document relation info
+        putRelationData(mapping, schemaItem, dmlData, esFieldData);
         return resultIdVal;
     }
 
@@ -364,13 +406,67 @@ public class ESTemplate {
             }
 
             if (dmlOld.containsKey(columnName) && !mapping.getSkips().contains(fieldItem.getFieldName())) {
-                esFieldData.put(fieldItem.getFieldName(),
+                esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()),
                     getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName));
             }
         }
+
+        // add parent-child document relation info
+        putRelationData(mapping, schemaItem, dmlOld, esFieldData);
         return resultIdVal;
     }
 
+    private void putRelationDataFromRS(ESMapping mapping, SchemaItem schemaItem, ResultSet resultSet,
+                                       Map<String, Object> esFieldData) {
+        // add parent-child document relation info
+        if (!mapping.getRelations().isEmpty()) {
+            mapping.getRelations().forEach((relationField, relationMapping) -> {
+                Map<String, Object> relations = new HashMap<>();
+                relations.put("name", relationMapping.getName());
+                if (StringUtils.isNotEmpty(relationMapping.getParent())) {
+                    FieldItem parentFieldItem = schemaItem.getSelectFields().get(relationMapping.getParent());
+                    Object parentVal;
+                    try {
+                        parentVal = getValFromRS(mapping,
+                            resultSet,
+                            parentFieldItem.getFieldName(),
+                            parentFieldItem.getFieldName());
+                    } catch (SQLException e) {
+                        throw new RuntimeException(e);
+                    }
+                    if (parentVal != null) {
+                        relations.put("parent", parentVal.toString());
+                        esFieldData.put("$parent_routing", parentVal.toString());
+                    }
+                }
+                esFieldData.put(relationField, relations);
+            });
+        }
+    }
+
+    private void putRelationData(ESMapping mapping, SchemaItem schemaItem, Map<String, Object> dmlData,
+                                 Map<String, Object> esFieldData) {
+        // add parent-child document relation info
+        if (!mapping.getRelations().isEmpty()) {
+            mapping.getRelations().forEach((relationField, relationMapping) -> {
+                Map<String, Object> relations = new HashMap<>();
+                relations.put("name", relationMapping.getName());
+                if (StringUtils.isNotEmpty(relationMapping.getParent())) {
+                    FieldItem parentFieldItem = schemaItem.getSelectFields().get(relationMapping.getParent());
+                    String columnName = parentFieldItem.getColumnItems().iterator().next().getColumnName();
+                    Object parentVal = getValFromData(mapping, dmlData, parentFieldItem.getFieldName(), columnName);
+                    if (parentVal != null) {
+                        relations.put("parent", parentVal.toString());
+                        esFieldData.put("$parent_routing", parentVal.toString());
+                    }
+                }
+                esFieldData.put(relationField, relations);
+            });
+        }
+    }
+
     /**
      * local cache of es field types
      */
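The "$parent_routing" entry added above is a transient hand-off: putRelationDataFromRS/putRelationData stash the parent id under that key, and the index/update builders remove it again to set the request routing, so that child documents land on the same shard as their parent, as Elasticsearch join fields require. A minimal sketch of the same pattern in isolation (index, type, ids and the customer_order field are illustrative; the TransportClient setup is assumed to match ESTest#init() further below):

    import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.apache.commons.lang.StringUtils;
    import org.elasticsearch.action.index.IndexRequestBuilder;
    import org.elasticsearch.client.transport.TransportClient;

    public class ParentRoutingSketch {

        public static void indexChild(TransportClient transportClient) {
            Map<String, Object> esFieldData = new LinkedHashMap<>();
            Map<String, Object> relation = new HashMap<>();
            relation.put("name", "order");           // child side of the join field
            relation.put("parent", "2");             // parent document id
            esFieldData.put("customer_order", relation);
            esFieldData.put("$parent_routing", "2"); // transient marker, as set by putRelationData

            // strip the marker and turn it into request routing, as append4Update does
            String routing = (String) esFieldData.remove("$parent_routing");
            IndexRequestBuilder builder = transportClient
                .prepareIndex("customer", "_doc", "oid_4")
                .setSource(esFieldData);
            if (StringUtils.isNotEmpty(routing)) {
                builder.setRouting(routing);         // children must live on the parent's shard
            }
            builder.get();
        }
    }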

+ 21 - 0
client-adapter/elasticsearch/src/main/resources/es/biz_order.yml

@@ -0,0 +1,21 @@
+dataSourceKey: defaultDS
+destination: example
+groupId: g1
+esMapping:
+  _index: customer
+  _type: _doc
+  _id: _id
+  relations:
+    customer_order:
+      name: order
+      parent: customer_id
+  sql: "select concat('oid_', t.id) as _id,
+        t.customer_id,
+        t.id as order_id,
+        t.serial_code as order_serial,
+        t.c_time as order_time
+        from biz_order t"
+  skips:
+    - customer_id
+  etlCondition: "where t.c_time>='{0}'"
+  commitBatch: 3000
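In this child-side mapping, relations.customer_order.parent: customer_id names the select field that carries the parent id: its value is copied into the join field's "parent" attribute and into the "$parent_routing" marker, while skips keeps the raw customer_id column itself out of the indexed _source. The concat('oid_', t.id) _id prevents child ids from colliding with parent ids in the shared customer index.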

+ 47 - 0
client-adapter/elasticsearch/src/main/resources/es/customer.yml

@@ -0,0 +1,47 @@
+dataSourceKey: defaultDS
+destination: example
+groupId: g1
+esMapping:
+  _index: customer
+  _type: _doc
+  _id: id
+  relations:
+    customer_order:
+      name: customer
+  sql: "select t.id, t.name, t.email from customer t"
+  etlCondition: "where t.c_time>='{0}'"
+  commitBatch: 3000
+
+
+#{
+#  "mappings":{
+#    "_doc":{
+#      "properties":{
+#        "id": {
+#          "type": "long"
+#        },
+#        "name": {
+#          "type": "text"
+#        },
+#        "email": {
+#          "type": "text"
+#        },
+#        "order_id": {
+#          "type": "long"
+#        },
+#        "order_serial": {
+#          "type": "text"
+#        },
+#        "order_time": {
+#          "type": "date"
+#        },
+#        "customer_order":{
+#          "type":"join",
+#          "relations":{
+#            "customer":"order"
+#          }
+#        }
+#      }
+#    }
+#  }
+#}
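The commented block is the join mapping that both configs above expect on the customer index. For reference, a sketch that creates it through the same transport client (client setup as in ESTest#init() below; the mapping JSON mirrors the comment):

    import org.elasticsearch.client.transport.TransportClient;
    import org.elasticsearch.common.xcontent.XContentType;

    public class CreateJoinIndexSketch {

        public static void createCustomerIndex(TransportClient transportClient) {
            // properties section of the commented mapping, re-serialized
            String mapping = "{\"properties\":{"
                             + "\"id\":{\"type\":\"long\"},"
                             + "\"name\":{\"type\":\"text\"},"
                             + "\"email\":{\"type\":\"text\"},"
                             + "\"order_id\":{\"type\":\"long\"},"
                             + "\"order_serial\":{\"type\":\"text\"},"
                             + "\"order_time\":{\"type\":\"date\"},"
                             + "\"customer_order\":{\"type\":\"join\",\"relations\":{\"customer\":\"order\"}}"
                             + "}}";
            transportClient.admin()
                .indices()
                .prepareCreate("customer")
                .addMapping("_doc", mapping, XContentType.JSON)
                .get();
        }
    }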

+ 1 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/ConfigLoadTest.java

@@ -23,6 +23,7 @@ public class ConfigLoadTest {
     public void testLoad() {
         Map<String, ESSyncConfig> configMap = ESSyncConfigLoader.load(null);
         ESSyncConfig config = configMap.get("mytest_user.yml");
         Assert.assertNotNull(config);
+        config.validate();
         Assert.assertEquals("defaultDS", config.getDataSourceKey());
         ESSyncConfig.ESMapping esMapping = config.getEsMapping();

+ 118 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/ESTest.java

@@ -0,0 +1,118 @@
+package com.alibaba.otter.canal.client.adapter.es.test;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.elasticsearch.action.bulk.BulkItemResponse;
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.transport.TransportClient;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.transport.client.PreBuiltTransportClient;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class ESTest {
+
+    private TransportClient transportClient;
+
+    @Before
+    public void init() throws UnknownHostException {
+        Settings.Builder settingBuilder = Settings.builder();
+        settingBuilder.put("cluster.name", TestConstant.clusterName);
+        Settings settings = settingBuilder.build();
+        transportClient = new PreBuiltTransportClient(settings);
+        String[] hostArray = TestConstant.esHosts.split(",");
+        for (String host : hostArray) {
+            int i = host.indexOf(":");
+            transportClient.addTransportAddress(new TransportAddress(InetAddress.getByName(host.substring(0, i)),
+                Integer.parseInt(host.substring(i + 1))));
+        }
+    }
+
+    @Test
+    public void test01() {
+        SearchResponse response = transportClient.prepareSearch("test")
+            .setTypes("osm")
+            .setQuery(QueryBuilders.termQuery("_id", "1"))
+            .setSize(10000)
+            .get();
+        for (SearchHit hit : response.getHits()) {
+            System.out.println(hit.getSourceAsMap().get("data").getClass());
+        }
+    }
+
+    @Test
+    public void test02() {
+        Map<String, Object> esFieldData = new LinkedHashMap<>();
+        esFieldData.put("userId", 2L);
+        esFieldData.put("eventId", 4L);
+        esFieldData.put("eventName", "网络异常");
+        esFieldData.put("description", "第四个事件信息");
+
+        Map<String, Object> relations = new LinkedHashMap<>();
+        esFieldData.put("user_event", relations);
+        relations.put("name", "event");
+        relations.put("parent", "2");
+
+        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+        bulkRequestBuilder
+            .add(transportClient.prepareIndex("test", "osm", "2_4").setRouting("2").setSource(esFieldData));
+        commit(bulkRequestBuilder);
+    }
+
+    @Test
+    public void test03() {
+        Map<String, Object> esFieldData = new LinkedHashMap<>();
+        esFieldData.put("userId", 2L);
+        esFieldData.put("eventName", "网络异常1");
+
+        Map<String, Object> relations = new LinkedHashMap<>();
+        esFieldData.put("user_event", relations);
+        relations.put("name", "event");
+        relations.put("parent", "2");
+
+        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+        bulkRequestBuilder.add(transportClient.prepareUpdate("test", "osm", "2_4").setRouting("2").setDoc(esFieldData));
+        commit(bulkRequestBuilder);
+    }
+
+    @Test
+    public void test04() {
+        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+        bulkRequestBuilder.add(transportClient.prepareDelete("test", "osm", "2_4"));
+        commit(bulkRequestBuilder);
+    }
+
+    private void commit(BulkRequestBuilder bulkRequestBuilder) {
+        if (bulkRequestBuilder.numberOfActions() > 0) {
+            BulkResponse response = bulkRequestBuilder.execute().actionGet();
+            if (response.hasFailures()) {
+                for (BulkItemResponse itemResponse : response.getItems()) {
+                    if (!itemResponse.isFailed()) {
+                        continue;
+                    }
+
+                    if (itemResponse.getFailure().getStatus() == RestStatus.NOT_FOUND) {
+                        System.out.println(itemResponse.getFailureMessage());
+                    } else {
+                        System.out.println("ES bulk commit error" + itemResponse.getFailureMessage());
+                    }
+                }
+            }
+        }
+    }
+
+    @After
+    public void after() {
+        transportClient.close();
+    }
+}
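Taken together, the tests walk the child-document lifecycle the adapter now automates: test02 indexes a child with routing "2", test03 partially updates it under the same routing, and test04 deletes it by id. commit() applies the same tolerant bulk error handling pattern, logging NOT_FOUND failures without treating them as fatal.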

+ 2 - 2
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/TestConstant.java

@@ -11,9 +11,9 @@ public class TestConstant {
     public final static String    jdbcPassword = "121212";
 
     public final static String    esHosts      = "127.0.0.1:9300";
-    public final static String    clusterNmae  = "elasticsearch";
+    public final static String    clusterName  = "elasticsearch";
 
-    public static DruidDataSource dataSource;
+    public final static DruidDataSource dataSource;
 
     static {
         dataSource = new DruidDataSource();

+ 1 - 1
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/Common.java

@@ -22,7 +22,7 @@ public class Common {
         outerAdapterConfig.setName("es");
         outerAdapterConfig.setHosts(TestConstant.esHosts);
         Map<String, String> properties = new HashMap<>();
-        properties.put("cluster.name", TestConstant.clusterNmae);
+        properties.put("cluster.name", TestConstant.clusterName);
         outerAdapterConfig.setProperties(properties);
 
         ESAdapter esAdapter = new ESAdapter();

+ 1 - 1
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/HbaseAdapter.java

@@ -96,7 +96,7 @@ public class HbaseAdapter implements OuterAdapter {
             hbaseSyncService = new HbaseSyncService(hbaseTemplate);
 
             configMonitor = new HbaseConfigMonitor();
-            configMonitor.init(this);
+            configMonitor.init(this, envProperties);
         } catch (Exception e) {
             throw new RuntimeException(e);
         }

+ 16 - 4
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/monitor/HbaseConfigMonitor.java

@@ -3,6 +3,7 @@ package com.alibaba.otter.canal.client.adapter.hbase.monitor;
 import java.io.File;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Properties;
 
 import org.apache.commons.io.filefilter.FileFilterUtils;
 import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
@@ -11,8 +12,8 @@ import org.apache.commons.io.monitor.FileAlterationObserver;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
 
+import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder;
 import com.alibaba.otter.canal.client.adapter.hbase.HbaseAdapter;
 import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig;
 import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
@@ -26,10 +27,13 @@ public class HbaseConfigMonitor {
 
     private HbaseAdapter          hbaseAdapter;
 
+    private Properties            envProperties;
+
     private FileAlterationMonitor fileMonitor;
 
-    public void init(HbaseAdapter hbaseAdapter) {
+    public void init(HbaseAdapter hbaseAdapter, Properties envProperties) {
         this.hbaseAdapter = hbaseAdapter;
+        this.envProperties = envProperties;
         File confDir = Util.getConfDirPath(adapterName);
         try {
             FileAlterationObserver observer = new FileAlterationObserver(confDir,
@@ -60,7 +64,11 @@ public class HbaseConfigMonitor {
             try {
                 // load the newly added config file
                 String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName());
-                MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
+                MappingConfig config = YmlConfigBinder
+                    .bindYmlToObj(null, configContent, MappingConfig.class, null, envProperties);
+                if (config == null) {
+                    return;
+                }
                 config.validate();
                 addConfigToCache(file, config);
 
@@ -83,7 +91,11 @@ public class HbaseConfigMonitor {
                         onFileDelete(file);
                         return;
                     }
-                    MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
+                    MappingConfig config = YmlConfigBinder
+                        .bindYmlToObj(null, configContent, MappingConfig.class, null, envProperties);
+                    if (config == null) {
+                        return;
+                    }
                     config.validate();
                     if (hbaseAdapter.getHbaseMapping().containsKey(file.getName())) {
                         deleteConfigFromCache(file);
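Swapping new Yaml().loadAs(...) for YmlConfigBinder.bindYmlToObj(...) routes hot-reloaded HBase mapping files through the same relaxed binder used at startup, presumably so placeholders in the yml can be resolved against envProperties; a null result (empty or unbindable content) is now skipped instead of failing the reload.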

+ 391 - 397
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseEtlService.java

@@ -1,397 +1,391 @@
-package com.alibaba.otter.canal.client.adapter.hbase.service;
-
-import java.sql.ResultSetMetaData;
-import java.sql.Timestamp;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-
-import javax.sql.DataSource;
-
-import org.apache.hadoop.hbase.util.Bytes;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig;
-import com.alibaba.otter.canal.client.adapter.hbase.support.HRow;
-import com.alibaba.otter.canal.client.adapter.hbase.support.HbaseTemplate;
-import com.alibaba.otter.canal.client.adapter.hbase.support.PhType;
-import com.alibaba.otter.canal.client.adapter.hbase.support.PhTypeUtil;
-import com.alibaba.otter.canal.client.adapter.hbase.support.Type;
-import com.alibaba.otter.canal.client.adapter.hbase.support.TypeUtil;
-import com.alibaba.otter.canal.client.adapter.support.EtlResult;
-import com.alibaba.otter.canal.client.adapter.support.JdbcTypeUtil;
-import com.alibaba.otter.canal.client.adapter.support.Util;
-import com.google.common.base.Joiner;
-
-/**
- * HBase ETL service class
- *
- * @author rewerma @ 2018-10-20
- * @version 1.0.0
- */
-public class HbaseEtlService {
-
-    private static Logger logger = LoggerFactory.getLogger(HbaseEtlService.class);
-
-
-    /**
-     * Create the HBase table
-     * 
-     * @param hbaseTemplate
-     * @param config
-     */
-    public static void createTable(HbaseTemplate hbaseTemplate, MappingConfig config) {
-        try {
-            // create the hbase table if it does not already exist
-            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
-            if (!hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
-                hbaseTemplate.createTable(hbaseMapping.getHbaseTable(), hbaseMapping.getFamily());
-            }
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * Import data
-     * 
-     * @param ds data source
-     * @param hbaseTemplate hbaseTemplate
-     * @param config mapping config
-     * @param params filter conditions
-     * @return import result
-     */
-    public static EtlResult importData(DataSource ds, HbaseTemplate hbaseTemplate, MappingConfig config,
-                                       List<String> params) {
-        EtlResult etlResult = new EtlResult();
-        AtomicLong successCount = new AtomicLong();
-        List<String> errMsg = new ArrayList<>();
-        String hbaseTable = "";
-        try {
-            if (config == null) {
-                logger.error("Config is null!");
-                etlResult.setSucceeded(false);
-                etlResult.setErrorMessage("Config is null!");
-                return etlResult;
-            }
-            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
-            hbaseTable = hbaseMapping.getHbaseTable();
-
-            long start = System.currentTimeMillis();
-
-            if (params != null && params.size() == 1 && "rebuild".equalsIgnoreCase(params.get(0))) {
-                logger.info(hbaseMapping.getHbaseTable() + " rebuild is starting!");
-                // drop the table if it exists
-                if (hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
-                    hbaseTemplate.disableTable(hbaseMapping.getHbaseTable());
-                    hbaseTemplate.deleteTable(hbaseMapping.getHbaseTable());
-                }
-                params = null;
-            } else {
-                logger.info(hbaseMapping.getHbaseTable() + " etl is starting!");
-            }
-            createTable(hbaseTemplate, config);
-
-            // assemble sql
-            String sql = "SELECT * FROM " + config.getHbaseMapping().getDatabase() + "." + hbaseMapping.getTable();
-
-            // append conditions
-            if (params != null && params.size() == 1 && hbaseMapping.getEtlCondition() == null) {
-                AtomicBoolean stExists = new AtomicBoolean(false);
-                // check for a SYS_TIME column
-                Util.sqlRS(ds, sql, rs -> {
-                    try {
-                        ResultSetMetaData rsmd = rs.getMetaData();
-                        int cnt = rsmd.getColumnCount();
-                        for (int i = 1; i <= cnt; i++) {
-                            String columnName = rsmd.getColumnName(i);
-                            if ("SYS_TIME".equalsIgnoreCase(columnName)) {
-                                stExists.set(true);
-                                break;
-                            }
-                        }
-                    } catch (Exception e) {
-                        // ignore
-                    }
-                    return null;
-                });
-                if (stExists.get()) {
-                    sql += " WHERE SYS_TIME >= '" + params.get(0) + "' ";
-                }
-            } else if (hbaseMapping.getEtlCondition() != null && params != null) {
-                String etlCondition = hbaseMapping.getEtlCondition();
-                int size = params.size();
-                for (int i = 0; i < size; i++) {
-                    etlCondition = etlCondition.replace("{" + i + "}", params.get(i));
-                }
-
-                sql += " " + etlCondition;
-            }
-
-            // fetch the total count
-            String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
-            long cnt = (Long) Util.sqlRS(ds, countSql, rs -> {
-                Long count = null;
-                try {
-                    if (rs.next()) {
-                        count = ((Number) rs.getObject(1)).longValue();
-                    }
-                } catch (Exception e) {
-                    logger.error(e.getMessage(), e);
-                }
-                return count == null ? 0 : count;
-            });
-
-            // use multiple threads for more than 10,000 records
-            if (cnt >= 10000) {
-                int threadCount = 3;
-                long perThreadCnt = cnt / threadCount;
-                ExecutorService executor = Executors.newFixedThreadPool(threadCount);
-                List<Future<Boolean>> futures = new ArrayList<>(threadCount);
-                for (int i = 0; i < threadCount; i++) {
-                    long offset = i * perThreadCnt;
-                    Long size = null;
-                    if (i != threadCount - 1) {
-                        size = perThreadCnt;
-                    }
-                    String sqlFinal;
-                    if (size != null) {
-                        sqlFinal = sql + " LIMIT " + offset + "," + size;
-                    } else {
-                        sqlFinal = sql + " LIMIT " + offset + "," + cnt;
-                    }
-                    Future<Boolean> future = executor.submit(
-                        () -> executeSqlImport(ds, sqlFinal, hbaseMapping, hbaseTemplate, successCount, errMsg));
-                    futures.add(future);
-                }
-
-                for (Future<Boolean> future : futures) {
-                    future.get();
-                }
-
-                executor.shutdown();
-            } else {
-                executeSqlImport(ds, sql, hbaseMapping, hbaseTemplate, successCount, errMsg);
-            }
-
-            logger.info(hbaseMapping.getHbaseTable() + " etl completed in: "
-                        + (System.currentTimeMillis() - start) / 1000 + "s!");
-
-            etlResult.setResultMessage("导入HBase表 " + hbaseMapping.getHbaseTable() + " 数据:" + successCount.get() + " 条");
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            errMsg.add(hbaseTable + " etl failed! ==>" + e.getMessage());
-        }
-
-        if (errMsg.isEmpty()) {
-            etlResult.setSucceeded(true);
-        } else {
-            etlResult.setErrorMessage(Joiner.on("\n").join(errMsg));
-        }
-        return etlResult;
-    }
-
-    /**
-     * Execute the import
-     * 
-     * @param ds
-     * @param sql
-     * @param hbaseMapping
-     * @param hbaseTemplate
-     * @param successCount
-     * @param errMsg
-     * @return
-     */
-    private static boolean executeSqlImport(DataSource ds, String sql, MappingConfig.HbaseMapping hbaseMapping,
-                                            HbaseTemplate hbaseTemplate, AtomicLong successCount, List<String> errMsg) {
-        try {
-            Util.sqlRS(ds, sql, rs -> {
-                int i = 1;
-
-                try {
-                    boolean complete = false;
-                    List<HRow> rows = new ArrayList<>();
-                    String[] rowKeyColumns = null;
-                    if (hbaseMapping.getRowKey() != null) {
-                        rowKeyColumns = hbaseMapping.getRowKey().trim().split(",");
-                    }
-                    while (rs.next()) {
-                        int cc = rs.getMetaData().getColumnCount();
-                        int[] jdbcTypes = new int[cc];
-                        Class<?>[] classes = new Class[cc];
-                        for (int j = 1; j <= cc; j++) {
-                            int jdbcType = rs.getMetaData().getColumnType(j);
-                            jdbcTypes[j - 1] = jdbcType;
-                            classes[j - 1] = JdbcTypeUtil.jdbcType2javaType(jdbcType);
-                        }
-                        HRow row = new HRow();
-
-                        if (rowKeyColumns != null) {
-                            // concatenate rowKey columns
-                            StringBuilder rowKeyVale = new StringBuilder();
-                            for (String rowKeyColumnName : rowKeyColumns) {
-                                Object obj = rs.getObject(rowKeyColumnName);
-                                if (obj != null) {
-                                    rowKeyVale.append(obj.toString());
-                                }
-                                rowKeyVale.append("|");
-                            }
-                            int len = rowKeyVale.length();
-                            if (len > 0) {
-                                rowKeyVale.delete(len - 1, len);
-                            }
-                            row.setRowKey(Bytes.toBytes(rowKeyVale.toString()));
-                        }
-
-                        for (int j = 1; j <= cc; j++) {
-                            String columnName = rs.getMetaData().getColumnName(j);
-
-                            Object val = JdbcTypeUtil.getRSData(rs, columnName, jdbcTypes[j - 1]);
-                            if (val == null) {
-                                continue;
-                            }
-
-                            MappingConfig.ColumnItem columnItem = hbaseMapping.getColumnItems().get(columnName);
-                            // no column mapping configured
-                            if (columnItem == null) {
-                                String family = hbaseMapping.getFamily();
-                                String qualifile = columnName;
-                                if (hbaseMapping.isUppercaseQualifier()) {
-                                    qualifile = qualifile.toUpperCase();
-                                }
-                                if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
-                                    if (hbaseMapping.getRowKey() == null && j == 1) {
-                                        row.setRowKey(Bytes.toBytes(val.toString()));
-                                    } else {
-                                        row.addCell(family, qualifile, Bytes.toBytes(val.toString()));
-                                    }
-                                } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
-                                    Type type = Type.getType(classes[j - 1]);
-                                    if (hbaseMapping.getRowKey() == null && j == 1) {
-                                        row.setRowKey(TypeUtil.toBytes(val, type));
-                                    } else {
-                                        row.addCell(family, qualifile, TypeUtil.toBytes(val, type));
-                                    }
-                                } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
-                                    PhType phType = PhType.getType(classes[j - 1]);
-                                    if (hbaseMapping.getRowKey() == null && j == 1) {
-                                        row.setRowKey(PhTypeUtil.toBytes(val, phType));
-                                    } else {
-                                        row.addCell(family, qualifile, PhTypeUtil.toBytes(val, phType));
-                                    }
-                                }
-                            } else {
-                                // no type conversion required
-                                if (columnItem.getType() == null || "".equals(columnItem.getType())) {
-                                    if (val instanceof java.sql.Date) {
-                                        SimpleDateFormat dateFmt = new SimpleDateFormat("yyyy-MM-dd");
-                                        val = dateFmt.format((Date) val);
-                                    } else if (val instanceof Timestamp) {
-                                        SimpleDateFormat datetimeFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-                                        val = datetimeFmt.format((Date) val);
-                                    }
-
-                                    byte[] valBytes = Bytes.toBytes(val.toString());
-                                    if (columnItem.isRowKey()) {
-                                        if (columnItem.getRowKeyLen() != null) {
-                                            valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
-                                            row.setRowKey(valBytes);
-                                        } else {
-                                            row.setRowKey(valBytes);
-                                        }
-                                    } else {
-                                        row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
-                                    }
-                                } else {
-                                    if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
-                                        byte[] valBytes = Bytes.toBytes(val.toString());
-                                        if (columnItem.isRowKey()) {
-                                            if (columnItem.getRowKeyLen() != null) {
-                                                valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
-                                            }
-                                            row.setRowKey(valBytes);
-                                        } else {
-                                            row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
-                                        }
-                                    } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
-                                        Type type = Type.getType(columnItem.getType());
-                                        if (columnItem.isRowKey()) {
-                                            if (columnItem.getRowKeyLen() != null) {
-                                                String v = limitLenNum(columnItem.getRowKeyLen(), val);
-                                                row.setRowKey(Bytes.toBytes(v));
-                                            } else {
-                                                row.setRowKey(TypeUtil.toBytes(val, type));
-                                            }
-                                        } else {
-                                            row.addCell(columnItem.getFamily(),
-                                                columnItem.getQualifier(),
-                                                TypeUtil.toBytes(val, type));
-                                        }
-                                    } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
-                                        PhType phType = PhType.getType(columnItem.getType());
-                                        if (columnItem.isRowKey()) {
-                                            row.setRowKey(PhTypeUtil.toBytes(val, phType));
-                                        } else {
-                                            row.addCell(columnItem.getFamily(),
-                                                columnItem.getQualifier(),
-                                                PhTypeUtil.toBytes(val, phType));
-                                        }
-                                    }
-                                }
-                            }
-                        }
-
-                        if (row.getRowKey() == null) throw new RuntimeException("RowKey 值为空");
-
-                        rows.add(row);
-                        complete = false;
-                        if (i % hbaseMapping.getCommitBatch() == 0 && !rows.isEmpty()) {
-                            hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
-                            rows.clear();
-                            complete = true;
-                        }
-                        i++;
-                        successCount.incrementAndGet();
-                        if (logger.isDebugEnabled()) {
-                            logger.debug("successful import count:" + successCount.get());
-                        }
-                    }
-
-                    if (!complete && !rows.isEmpty()) {
-                        hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
-                    }
-
-                } catch (Exception e) {
-                    logger.error(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage(), e);
-                    errMsg.add(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage());
-                    // throw new RuntimeException(e);
-                }
-                return i;
-            });
-            return true;
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            return false;
-        }
-    }
-
-    private static String limitLenNum(int len, Object val) {
-        if (val == null) {
-            return null;
-        }
-        if (val instanceof Number) {
-            return String.format("%0" + len + "d", (Number) ((Number) val).longValue());
-        } else if (val instanceof String) {
-            return String.format("%0" + len + "d", Long.parseLong((String) val));
-        }
-        return null;
-    }
-}
+package com.alibaba.otter.canal.client.adapter.hbase.service;
+
+import java.sql.ResultSetMetaData;
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+
+import javax.sql.DataSource;
+
+import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.hbase.support.HRow;
+import com.alibaba.otter.canal.client.adapter.hbase.support.HbaseTemplate;
+import com.alibaba.otter.canal.client.adapter.hbase.support.PhType;
+import com.alibaba.otter.canal.client.adapter.hbase.support.PhTypeUtil;
+import com.alibaba.otter.canal.client.adapter.hbase.support.Type;
+import com.alibaba.otter.canal.client.adapter.hbase.support.TypeUtil;
+import com.alibaba.otter.canal.client.adapter.support.EtlResult;
+import com.alibaba.otter.canal.client.adapter.support.JdbcTypeUtil;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+import com.google.common.base.Joiner;
+
+/**
+ * HBase ETL service class
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+public class HbaseEtlService {
+
+    private static Logger logger = LoggerFactory.getLogger(HbaseEtlService.class);
+
+    /**
+     * Create the HBase table
+     *
+     * @param hbaseTemplate
+     * @param config
+     */
+    public static void createTable(HbaseTemplate hbaseTemplate, MappingConfig config) {
+        try {
+            // create the hbase table if it does not already exist
+            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
+            if (!hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
+                hbaseTemplate.createTable(hbaseMapping.getHbaseTable(), hbaseMapping.getFamily());
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Import data
+     *
+     * @param ds data source
+     * @param hbaseTemplate hbaseTemplate
+     * @param config mapping config
+     * @param params filter conditions
+     * @return import result
+     */
+    public static EtlResult importData(DataSource ds, HbaseTemplate hbaseTemplate, MappingConfig config,
+                                       List<String> params) {
+        EtlResult etlResult = new EtlResult();
+        AtomicLong successCount = new AtomicLong();
+        List<String> errMsg = new ArrayList<>();
+        String hbaseTable = "";
+        try {
+            if (config == null) {
+                logger.error("Config is null!");
+                etlResult.setSucceeded(false);
+                etlResult.setErrorMessage("Config is null!");
+                return etlResult;
+            }
+            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
+            hbaseTable = hbaseMapping.getHbaseTable();
+
+            long start = System.currentTimeMillis();
+
+            if (params != null && params.size() == 1 && "rebuild".equalsIgnoreCase(params.get(0))) {
+                logger.info(hbaseMapping.getHbaseTable() + " rebuild is starting!");
+                // drop the table if it exists
+                if (hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
+                    hbaseTemplate.disableTable(hbaseMapping.getHbaseTable());
+                    hbaseTemplate.deleteTable(hbaseMapping.getHbaseTable());
+                }
+                params = null;
+            } else {
+                logger.info(hbaseMapping.getHbaseTable() + " etl is starting!");
+            }
+            createTable(hbaseTemplate, config);
+
+            // assemble sql
+            String sql = "SELECT * FROM " + config.getHbaseMapping().getDatabase() + "." + hbaseMapping.getTable();
+
+            // append conditions
+            if (params != null && params.size() == 1 && hbaseMapping.getEtlCondition() == null) {
+                AtomicBoolean stExists = new AtomicBoolean(false);
+                // check for a SYS_TIME column
+                Util.sqlRS(ds, sql, rs -> {
+                    try {
+                        ResultSetMetaData rsmd = rs.getMetaData();
+                        int cnt = rsmd.getColumnCount();
+                        for (int i = 1; i <= cnt; i++) {
+                            String columnName = rsmd.getColumnName(i);
+                            if ("SYS_TIME".equalsIgnoreCase(columnName)) {
+                                stExists.set(true);
+                                break;
+                            }
+                        }
+                    } catch (Exception e) {
+                        // ignore
+                    }
+                    return null;
+                });
+                if (stExists.get()) {
+                    sql += " WHERE SYS_TIME >= '" + params.get(0) + "' ";
+                }
+            } else if (hbaseMapping.getEtlCondition() != null && params != null) {
+                String etlCondition = hbaseMapping.getEtlCondition();
+                int size = params.size();
+                for (int i = 0; i < size; i++) {
+                    etlCondition = etlCondition.replace("{" + i + "}", params.get(i));
+                }
+
+                sql += " " + etlCondition;
+            }
+
+            // fetch the total count
+            String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
+            long cnt = (Long) Util.sqlRS(ds, countSql, rs -> {
+                Long count = null;
+                try {
+                    if (rs.next()) {
+                        count = ((Number) rs.getObject(1)).longValue();
+                    }
+                } catch (Exception e) {
+                    logger.error(e.getMessage(), e);
+                }
+                return count == null ? 0 : count;
+            });
+
+            // use multiple threads for more than 10,000 records
+            if (cnt >= 10000) {
+                int threadCount = 3;
+                long perThreadCnt = cnt / threadCount;
+                ExecutorService executor = Util.newFixedThreadPool(threadCount, 5000L);
+                for (int i = 0; i < threadCount; i++) {
+                    long offset = i * perThreadCnt;
+                    Long size = null;
+                    if (i != threadCount - 1) {
+                        size = perThreadCnt;
+                    }
+                    String sqlFinal;
+                    if (size != null) {
+                        sqlFinal = sql + " LIMIT " + offset + "," + size;
+                    } else {
+                        sqlFinal = sql + " LIMIT " + offset + "," + cnt;
+                    }
+                    executor.submit(
+                        () -> executeSqlImport(ds, sqlFinal, hbaseMapping, hbaseTemplate, successCount, errMsg));
+                }
+
+                executor.shutdown();
+                while (!executor.awaitTermination(3, TimeUnit.SECONDS)) {
+                    // ignore
+                }
+            } else {
+                executeSqlImport(ds, sql, hbaseMapping, hbaseTemplate, successCount, errMsg);
+            }
+
+            logger.info(hbaseMapping.getHbaseTable() + " etl completed in: "
+                        + (System.currentTimeMillis() - start) / 1000 + "s!");
+
+            etlResult.setResultMessage("导入HBase表 " + hbaseMapping.getHbaseTable() + " 数据:" + successCount.get() + " 条");
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            errMsg.add(hbaseTable + " etl failed! ==>" + e.getMessage());
+        }
+
+        if (errMsg.isEmpty()) {
+            etlResult.setSucceeded(true);
+        } else {
+            etlResult.setErrorMessage(Joiner.on("\n").join(errMsg));
+        }
+        return etlResult;
+    }
+
+    /**
+     * Execute the import
+     *
+     * @param ds
+     * @param sql
+     * @param hbaseMapping
+     * @param hbaseTemplate
+     * @param successCount
+     * @param errMsg
+     * @return
+     */
+    private static boolean executeSqlImport(DataSource ds, String sql, MappingConfig.HbaseMapping hbaseMapping,
+                                            HbaseTemplate hbaseTemplate, AtomicLong successCount, List<String> errMsg) {
+        try {
+            Util.sqlRS(ds, sql, rs -> {
+                int i = 1;
+
+                try {
+                    boolean complete = false;
+                    List<HRow> rows = new ArrayList<>();
+                    String[] rowKeyColumns = null;
+                    if (hbaseMapping.getRowKey() != null) {
+                        rowKeyColumns = hbaseMapping.getRowKey().trim().split(",");
+                    }
+                    while (rs.next()) {
+                        int cc = rs.getMetaData().getColumnCount();
+                        int[] jdbcTypes = new int[cc];
+                        Class<?>[] classes = new Class[cc];
+                        for (int j = 1; j <= cc; j++) {
+                            int jdbcType = rs.getMetaData().getColumnType(j);
+                            jdbcTypes[j - 1] = jdbcType;
+                            classes[j - 1] = JdbcTypeUtil.jdbcType2javaType(jdbcType);
+                        }
+                        HRow row = new HRow();
+
+                        if (rowKeyColumns != null) {
+                            // concatenate rowKey columns
+                            StringBuilder rowKeyVale = new StringBuilder();
+                            for (String rowKeyColumnName : rowKeyColumns) {
+                                Object obj = rs.getObject(rowKeyColumnName);
+                                if (obj != null) {
+                                    rowKeyVale.append(obj.toString());
+                                }
+                                rowKeyVale.append("|");
+                            }
+                            int len = rowKeyVale.length();
+                            if (len > 0) {
+                                rowKeyVale.delete(len - 1, len);
+                            }
+                            row.setRowKey(Bytes.toBytes(rowKeyVale.toString()));
+                        }
+
+                        for (int j = 1; j <= cc; j++) {
+                            String columnName = rs.getMetaData().getColumnName(j);
+
+                            Object val = JdbcTypeUtil.getRSData(rs, columnName, jdbcTypes[j - 1]);
+                            if (val == null) {
+                                continue;
+                            }
+
+                            MappingConfig.ColumnItem columnItem = hbaseMapping.getColumnItems().get(columnName);
+                            // no column mapping configured
+                            if (columnItem == null) {
+                                String family = hbaseMapping.getFamily();
+                                String qualifile = columnName;
+                                if (hbaseMapping.isUppercaseQualifier()) {
+                                    qualifile = qualifile.toUpperCase();
+                                }
+                                if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
+                                    if (hbaseMapping.getRowKey() == null && j == 1) {
+                                        row.setRowKey(Bytes.toBytes(val.toString()));
+                                    } else {
+                                        row.addCell(family, qualifile, Bytes.toBytes(val.toString()));
+                                    }
+                                } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
+                                    Type type = Type.getType(classes[j - 1]);
+                                    if (hbaseMapping.getRowKey() == null && j == 1) {
+                                        row.setRowKey(TypeUtil.toBytes(val, type));
+                                    } else {
+                                        row.addCell(family, qualifile, TypeUtil.toBytes(val, type));
+                                    }
+                                } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
+                                    PhType phType = PhType.getType(classes[j - 1]);
+                                    if (hbaseMapping.getRowKey() == null && j == 1) {
+                                        row.setRowKey(PhTypeUtil.toBytes(val, phType));
+                                    } else {
+                                        row.addCell(family, qualifile, PhTypeUtil.toBytes(val, phType));
+                                    }
+                                }
+                            } else {
+                                // no type conversion required
+                                if (columnItem.getType() == null || "".equals(columnItem.getType())) {
+                                    if (val instanceof java.sql.Date) {
+                                        SimpleDateFormat dateFmt = new SimpleDateFormat("yyyy-MM-dd");
+                                        val = dateFmt.format((Date) val);
+                                    } else if (val instanceof Timestamp) {
+                                        SimpleDateFormat datetimeFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+                                        val = datetimeFmt.format((Date) val);
+                                    }
+
+                                    byte[] valBytes = Bytes.toBytes(val.toString());
+                                    if (columnItem.isRowKey()) {
+                                        if (columnItem.getRowKeyLen() != null) {
+                                            valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
+                                            row.setRowKey(valBytes);
+                                        } else {
+                                            row.setRowKey(valBytes);
+                                        }
+                                    } else {
+                                        row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
+                                    }
+                                } else {
+                                    if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
+                                        byte[] valBytes = Bytes.toBytes(val.toString());
+                                        if (columnItem.isRowKey()) {
+                                            if (columnItem.getRowKeyLen() != null) {
+                                                valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
+                                            }
+                                            row.setRowKey(valBytes);
+                                        } else {
+                                            row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
+                                        }
+                                    } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
+                                        Type type = Type.getType(columnItem.getType());
+                                        if (columnItem.isRowKey()) {
+                                            if (columnItem.getRowKeyLen() != null) {
+                                                String v = limitLenNum(columnItem.getRowKeyLen(), val);
+                                                row.setRowKey(Bytes.toBytes(v));
+                                            } else {
+                                                row.setRowKey(TypeUtil.toBytes(val, type));
+                                            }
+                                        } else {
+                                            row.addCell(columnItem.getFamily(),
+                                                columnItem.getQualifier(),
+                                                TypeUtil.toBytes(val, type));
+                                        }
+                                    } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
+                                        PhType phType = PhType.getType(columnItem.getType());
+                                        if (columnItem.isRowKey()) {
+                                            row.setRowKey(PhTypeUtil.toBytes(val, phType));
+                                        } else {
+                                            row.addCell(columnItem.getFamily(),
+                                                columnItem.getQualifier(),
+                                                PhTypeUtil.toBytes(val, phType));
+                                        }
+                                    }
+                                }
+                            }
+                        }
+
+                        if (row.getRowKey() == null) throw new RuntimeException("RowKey 值为空");
+
+                        rows.add(row);
+                        complete = false;
+                        if (i % hbaseMapping.getCommitBatch() == 0 && !rows.isEmpty()) {
+                            hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
+                            rows.clear();
+                            complete = true;
+                        }
+                        i++;
+                        successCount.incrementAndGet();
+                        if (logger.isDebugEnabled()) {
+                            logger.debug("successful import count:" + successCount.get());
+                        }
+                    }
+
+                    if (!complete && !rows.isEmpty()) {
+                        hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
+                    }
+
+                } catch (Exception e) {
+                    logger.error(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage(), e);
+                    errMsg.add(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage());
+                    // throw new RuntimeException(e);
+                }
+                return i;
+            });
+            return true;
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            return false;
+        }
+    }
+
+    private static String limitLenNum(int len, Object val) {
+        if (val == null) {
+            return null;
+        }
+        if (val instanceof Number) {
+            return String.format("%0" + len + "d", ((Number) val).longValue());
+        } else if (val instanceof String) {
+            return String.format("%0" + len + "d", Long.parseLong((String) val));
+        }
+        return null;
+    }
+}
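Besides the wholesale reformat, the threaded branch above now takes its pool from Util.newFixedThreadPool(threadCount, 5000L) and waits via shutdown() plus an awaitTermination loop instead of collecting Futures. The helper lives in the common module's Util.java, which this PR also changes but which is outside this excerpt; a plausible sketch, assuming the second argument is an idle keep-alive in milliseconds:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    public class UtilPoolSketch {

        // fixed-size pool whose idle threads are reclaimed after keepAliveMillis,
        // unlike Executors.newFixedThreadPool, whose core threads never time out
        public static ExecutorService newFixedThreadPool(int nThreads, long keepAliveMillis) {
            ThreadPoolExecutor executor = new ThreadPoolExecutor(nThreads,
                nThreads,
                keepAliveMillis,
                TimeUnit.MILLISECONDS,
                new LinkedBlockingQueue<>());
            executor.allowCoreThreadTimeOut(true);
            return executor;
        }
    }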

+ 3 - 3
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseSyncService.java

@@ -20,9 +20,9 @@ import com.alibaba.otter.canal.client.adapter.support.Dml;
  */
 public class HbaseSyncService {
 
-    private Logger        logger = LoggerFactory.getLogger(this.getClass());
+    private static Logger logger = LoggerFactory.getLogger(HbaseSyncService.class);
 
-    private HbaseTemplate hbaseTemplate;                                    // HBase template
+    private HbaseTemplate hbaseTemplate;                                           // HBase template
 
     public HbaseSyncService(HbaseTemplate hbaseTemplate){
         this.hbaseTemplate = hbaseTemplate;
@@ -140,7 +140,7 @@ public class HbaseSyncService {
                                         Integer.parseInt((String) entry.getValue()));
                                     bytes = Bytes.toBytes(v);
                                 } catch (Exception e) {
-                                    // ignore
+                                    logger.error(e.getMessage(), e);
                                 }
                             }
                         }

+ 1 - 3
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/PhTypeUtil.java

@@ -7,12 +7,10 @@ import java.math.RoundingMode;
 import java.sql.Timestamp;
 import java.util.Date;
 
-import com.alibaba.otter.canal.client.adapter.support.Util;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.joda.time.DateTime;
 
+import com.alibaba.otter.canal.client.adapter.support.Util;
 import com.google.common.math.LongMath;
-import org.joda.time.DateTimeZone;
 
 /**
  * Phoenix类型转换工具类

+ 1 - 1
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/common/EtlLock.java

@@ -27,7 +27,7 @@ public class EtlLock {
 
     private static final Map<String, InterProcessMutex> DISTRIBUTED_LOCK = new ConcurrentHashMap<>();
 
-    private static Mode                                 mode             = Mode.LOCAL;
+    private Mode                                        mode             = Mode.LOCAL;
 
     @Resource
     private CuratorClient                               curatorClient;

+ 4 - 4
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/common/SyncSwitch.java

@@ -34,7 +34,7 @@ public class SyncSwitch {
 
     private static final Map<String, BooleanMutex> DISTRIBUTED_LOCK   = new ConcurrentHashMap<>();
 
-    private static Mode                            mode               = Mode.LOCAL;
+    private Mode                                   mode               = Mode.LOCAL;
 
     @Resource
     private AdapterCanalConfig                     adapterCanalConfig;
@@ -165,20 +165,20 @@ public class SyncSwitch {
         }
     }
 
-    public Boolean status(String destination) {
+    public boolean status(String destination) {
         if (mode == Mode.LOCAL) {
             BooleanMutex mutex = LOCAL_LOCK.get(destination);
             if (mutex != null) {
                 return mutex.state();
             } else {
-                return null;
+                return false;
             }
         } else {
             BooleanMutex mutex = DISTRIBUTED_LOCK.get(destination);
             if (mutex != null) {
                 return mutex.state();
             } else {
-                return null;
+                return false;
             }
         }
     }
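
Returning a primitive `boolean` that defaults to `false` for unknown destinations removes the `status != null && ...` guards scattered through the callers below, and closes an auto-unboxing trap. A contrived sketch (not project code) of the hazard the old `Boolean` return carried:

// Contrived demo: auto-unboxing a null Boolean throws at runtime.
public class UnboxingDemo {

    static Boolean oldStyleStatus(String destination) {
        return null; // old API: null meant "destination unknown"
    }

    public static void main(String[] args) {
        Boolean status = oldStyleStatus("unknown");
        if (status != null && !status) {   // every caller needed this guard
            System.out.println("off");
        }
        boolean boom = oldStyleStatus("unknown"); // NullPointerException here
    }
}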

+ 2 - 2
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/AbstractCanalAdapterWorker.java

@@ -3,7 +3,6 @@ package com.alibaba.otter.canal.adapter.launcher.loader;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 
@@ -17,6 +16,7 @@ import com.alibaba.otter.canal.client.adapter.OuterAdapter;
 import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
 import com.alibaba.otter.canal.client.adapter.support.Dml;
 import com.alibaba.otter.canal.client.adapter.support.MessageUtil;
+import com.alibaba.otter.canal.client.adapter.support.Util;
 import com.alibaba.otter.canal.protocol.FlatMessage;
 import com.alibaba.otter.canal.protocol.Message;
 
@@ -43,7 +43,7 @@ public abstract class AbstractCanalAdapterWorker {
 
     public AbstractCanalAdapterWorker(List<List<OuterAdapter>> canalOuterAdapters){
         this.canalOuterAdapters = canalOuterAdapters;
-        this.groupInnerExecutorService = Executors.newFixedThreadPool(canalOuterAdapters.size());
+        this.groupInnerExecutorService = Util.newFixedThreadPool(canalOuterAdapters.size(), 5000L);
         syncSwitch = (SyncSwitch) SpringContext.getBean(SyncSwitch.class);
     }
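
`Util.newFixedThreadPool(size, 5000L)` replaces the plain JDK factory here and in the MQ workers below. Its implementation is not part of this diff; a plausible sketch, assuming the long argument is an idle keep-alive in milliseconds so unused worker threads can expire instead of pinning the JVM:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

// Plausible sketch only -- the real Util.newFixedThreadPool is not shown in
// this hunk. Assumes the long argument is an idle keep-alive in milliseconds.
public final class PoolsSketch {

    public static ExecutorService newFixedThreadPool(int size, long keepAliveMs) {
        ThreadPoolExecutor executor = new ThreadPoolExecutor(size, size,
            keepAliveMs, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>());
        // Unlike Executors.newFixedThreadPool, idle core threads may expire,
        // so a forgotten pool does not hold non-daemon threads forever.
        executor.allowCoreThreadTimeOut(true);
        return executor;
    }

    public static ExecutorService newSingleThreadExecutor(long keepAliveMs) {
        return newFixedThreadPool(1, keepAliveMs);
    }
}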
 

+ 7 - 5
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterKafkaWorker.java

@@ -2,13 +2,13 @@ package com.alibaba.otter.canal.adapter.launcher.loader;
 
 import java.util.List;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.kafka.common.errors.WakeupException;
 
 import com.alibaba.otter.canal.client.adapter.OuterAdapter;
 import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
+import com.alibaba.otter.canal.client.adapter.support.Util;
 import com.alibaba.otter.canal.client.kafka.KafkaCanalConnector;
 
 /**
@@ -44,12 +44,12 @@ public class CanalAdapterKafkaWorker extends AbstractCanalAdapterWorker {
     protected void process() {
         while (!running) {
             try {
-                Thread.sleep(1000);
+                Thread.sleep(500);
             } catch (InterruptedException e) {
                 // ignore
             }
         }
-        ExecutorService workerExecutor = Executors.newSingleThreadExecutor();
+        ExecutorService workerExecutor = Util.newSingleThreadExecutor(5000L);
         int retry = canalClientConfig.getRetries() == null
                     || canalClientConfig.getRetries() == 0 ? 1 : canalClientConfig.getRetries();
         long timeout = canalClientConfig.getTimeout() == null ? 30000 : canalClientConfig.getTimeout(); // 默认超时30秒
@@ -63,8 +63,8 @@ public class CanalAdapterKafkaWorker extends AbstractCanalAdapterWorker {
                 connector.subscribe();
                 logger.info("=============> Subscribe topic: {} succeed <=============", this.topic);
                 while (running) {
-                    Boolean status = syncSwitch.status(canalDestination);
-                    if (status != null && !status) {
+                    boolean status = syncSwitch.status(canalDestination);
+                    if (!status) {
                         connector.disconnect();
                         break;
                     }
@@ -85,6 +85,8 @@ public class CanalAdapterKafkaWorker extends AbstractCanalAdapterWorker {
             }
         }
 
+        workerExecutor.shutdown();
+
         try {
             connector.unsubscribe();
         } catch (WakeupException e) {

+ 15 - 23
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterLoader.java

@@ -5,7 +5,7 @@ import java.net.SocketAddress;
 import java.util.*;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
@@ -176,40 +176,32 @@ public class CanalAdapterLoader {
     public void destroy() {
         if (!canalWorkers.isEmpty()) {
             ExecutorService stopExecutorService = Executors.newFixedThreadPool(canalWorkers.size());
-            List<Future<Boolean>> futures = new ArrayList<>();
             for (CanalAdapterWorker canalAdapterWorker : canalWorkers.values()) {
-                futures.add(stopExecutorService.submit(() -> {
-                    canalAdapterWorker.stop();
-                    return true;
-                }));
+                stopExecutorService.execute(canalAdapterWorker::stop);
             }
-            futures.forEach(future -> {
-                try {
-                    future.get();
-                } catch (Exception e) {
+            stopExecutorService.shutdown();
+            try {
+                while (!stopExecutorService.awaitTermination(1, TimeUnit.SECONDS)) {
                     // ignore
                 }
-            });
-            stopExecutorService.shutdown();
+            } catch (InterruptedException e) {
+                // ignore
+            }
         }
 
         if (!canalMQWorker.isEmpty()) {
             ExecutorService stopMQWorkerService = Executors.newFixedThreadPool(canalMQWorker.size());
-            List<Future<Boolean>> futures = new ArrayList<>();
             for (AbstractCanalAdapterWorker canalAdapterMQWorker : canalMQWorker.values()) {
-                futures.add(stopMQWorkerService.submit(() -> {
-                    canalAdapterMQWorker.stop();
-                    return true;
-                }));
+                stopMQWorkerService.execute(canalAdapterMQWorker::stop);
             }
-            futures.forEach(future -> {
-                try {
-                    future.get();
-                } catch (Exception e) {
+            stopMQWorkerService.shutdown();
+            try {
+                while (!stopMQWorkerService.awaitTermination(1, TimeUnit.SECONDS)) {
                     // ignore
                 }
-            });
-            stopMQWorkerService.shutdown();
+            } catch (InterruptedException e) {
+                // ignore
+            }
         }
         logger.info("All canal adapters destroyed");
     }
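
The reworked `destroy()` drops the per-worker `Future` bookkeeping in favor of fire-and-forget `execute()` plus a `shutdown()`/`awaitTermination` loop. The idiom in isolation (the Runnables are illustrative stand-ins for `canalAdapterWorker::stop`):

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Stand-alone demo of the stop idiom used above.
public class StopIdiomDemo {

    public static void main(String[] args) throws InterruptedException {
        List<Runnable> stops = Arrays.asList(
            () -> System.out.println("worker-1 stopped"),
            () -> System.out.println("worker-2 stopped"));
        ExecutorService pool = Executors.newFixedThreadPool(stops.size());
        stops.forEach(pool::execute);
        pool.shutdown(); // reject new tasks, let the queued stop() calls drain
        while (!pool.awaitTermination(1, TimeUnit.SECONDS)) {
            // still stopping; poll again
        }
        System.out.println("all workers stopped");
    }
}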

+ 8 - 5
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterRocketMQWorker.java

@@ -2,12 +2,12 @@ package com.alibaba.otter.canal.adapter.launcher.loader;
 
 import java.util.List;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
 
 import org.apache.kafka.common.errors.WakeupException;
 
 import com.alibaba.otter.canal.client.adapter.OuterAdapter;
 import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
+import com.alibaba.otter.canal.client.adapter.support.Util;
 import com.alibaba.otter.canal.client.rocketmq.RocketMQCanalConnector;
 
 /**
@@ -49,8 +49,9 @@ public class CanalAdapterRocketMQWorker extends AbstractCanalAdapterWorker {
             }
         }
 
-        ExecutorService workerExecutor = Executors.newSingleThreadExecutor();
-        int retry = canalClientConfig.getRetries() == null || canalClientConfig.getRetries() == 0 ? 1 : canalClientConfig.getRetries();
+        ExecutorService workerExecutor = Util.newSingleThreadExecutor(5000L);
+        int retry = canalClientConfig.getRetries() == null
+                    || canalClientConfig.getRetries() == 0 ? 1 : canalClientConfig.getRetries();
         long timeout = canalClientConfig.getTimeout() == null ? 30000 : canalClientConfig.getTimeout(); // 默认超时30秒
 
         while (running) {
@@ -62,8 +63,8 @@ public class CanalAdapterRocketMQWorker extends AbstractCanalAdapterWorker {
                 connector.subscribe();
                 logger.info("=============> Subscribe topic: {} succeed<=============", this.topic);
                 while (running) {
-                    Boolean status = syncSwitch.status(canalDestination);
-                    if (status != null && !status) {
+                    boolean status = syncSwitch.status(canalDestination);
+                    if (!status) {
                         connector.disconnect();
                         break;
                     }
@@ -84,6 +85,8 @@ public class CanalAdapterRocketMQWorker extends AbstractCanalAdapterWorker {
             }
         }
 
+        workerExecutor.shutdown();
+
         try {
             connector.unsubscribe();
         } catch (WakeupException e) {

+ 1 - 1
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterWorker.java

@@ -140,7 +140,7 @@ public class CanalAdapterWorker extends AbstractCanalAdapterWorker {
                     }
                 }
 
-            } catch (Exception e) {
+            } catch (Throwable e) {
                 logger.error("process error!", e);
             } finally {
                 connector.disconnect();

+ 4 - 2
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/ApplicationConfigMonitor.java

@@ -1,7 +1,9 @@
 package com.alibaba.otter.canal.adapter.launcher.monitor;
 
 import java.io.File;
-import java.io.FileReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 
 import javax.annotation.PostConstruct;
@@ -68,7 +70,7 @@ public class ApplicationConfigMonitor {
             super.onFileChange(file);
             try {
                 // 检查yml格式
-                new Yaml().loadAs(new FileReader(file), Map.class);
+                new Yaml().loadAs(new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8), Map.class);
 
                 canalAdapterService.destroy();
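
`FileReader` always decodes with the platform default charset, so a UTF-8 `application.yml` containing Chinese comments could be mangled on, say, a GBK-default Windows host. The replacement pattern in isolation (file name is illustrative):

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;

// Demo of charset-explicit reading; the file name is illustrative.
public class Utf8ReadDemo {

    public static void main(String[] args) throws IOException {
        File file = new File("application.yml");
        StringBuilder sb = new StringBuilder();
        try (Reader reader = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8)) {
            char[] buf = new char[4096];
            int n;
            while ((n = reader.read(buf)) != -1) {
                sb.append(buf, 0, n); // decode as UTF-8 regardless of OS locale
            }
        }
        System.out.println(sb);
    }
}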
 

+ 8 - 3
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/remote/DbRemoteConfigLoader.java

@@ -1,6 +1,8 @@
 package com.alibaba.otter.canal.adapter.launcher.monitor.remote;
 
-import java.io.FileWriter;
+import java.io.FileOutputStream;
+import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
@@ -35,7 +37,7 @@ public class DbRemoteConfigLoader implements RemoteConfigLoader {
 
     private DruidDataSource          dataSource;
 
-    private static volatile long     currentConfigTimestamp = 0;
+    private volatile long            currentConfigTimestamp = 0;
     private Map<String, ConfigItem>  remoteAdapterConfigs   = new MapMaker().makeMap();
 
     private ScheduledExecutorService executor               = Executors.newScheduledThreadPool(2,
@@ -115,7 +117,10 @@ public class DbRemoteConfigLoader implements RemoteConfigLoader {
      * @param content 文件内容
      */
     private void overrideLocalCanalConfig(String content) {
-        try (FileWriter writer = new FileWriter(CommonUtils.getConfPath() + "application.yml")) {
+
+        try (OutputStreamWriter writer = new OutputStreamWriter(
+            new FileOutputStream(CommonUtils.getConfPath() + "application.yml"),
+            StandardCharsets.UTF_8)) {
             writer.write(content);
             writer.flush();
         } catch (Exception e) {

+ 9 - 4
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/remote/RemoteAdapterMonitorImpl.java

@@ -1,11 +1,14 @@
 package com.alibaba.otter.canal.adapter.launcher.monitor.remote;
 
-import com.alibaba.otter.canal.common.utils.CommonUtils;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.File;
-import java.io.FileWriter;
+import com.alibaba.otter.canal.common.utils.CommonUtils;
 
 /**
  * 远程配置监听器实现
@@ -35,7 +38,9 @@ public class RemoteAdapterMonitorImpl implements RemoteAdapterMonitor {
             }
         }
         String name = configItem.getName();
-        try (FileWriter writer = new FileWriter(confPath + category + "/" + configItem.getName())) {
+        try (OutputStreamWriter writer = new OutputStreamWriter(
+            new FileOutputStream(confPath + category + "/" + configItem.getName()),
+            StandardCharsets.UTF_8)) {
             writer.write(configItem.getContent());
             writer.flush();
             logger.info("## Loaded remote adapter config: {}/{}", category, name);
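
The write side gets the same treatment: `FileWriter` is swapped for an `OutputStreamWriter` over an explicit UTF-8 stream. In isolation (path and content are illustrative):

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

// Demo of charset-explicit writing; path and content are illustrative.
public class Utf8WriteDemo {

    public static void main(String[] args) throws IOException {
        try (Writer writer = new OutputStreamWriter(
                new FileOutputStream("example.yml"), StandardCharsets.UTF_8)) {
            writer.write("# 中文注释 survives any OS locale\nkey: value\n");
            writer.flush();
        }
    }
}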

+ 17 - 17
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/rest/CommonRest.java

@@ -69,16 +69,16 @@ public class CommonRest {
         }
         try {
 
-            Boolean oriSwitchStatus;
+            boolean oriSwitchStatus;
             if (destination != null) {
                 oriSwitchStatus = syncSwitch.status(destination);
-                if (oriSwitchStatus != null && oriSwitchStatus) {
+                if (oriSwitchStatus) {
                     syncSwitch.off(destination);
                 }
             } else {
                 // task可能为destination,直接锁task
                 oriSwitchStatus = syncSwitch.status(task);
-                if (oriSwitchStatus != null && oriSwitchStatus) {
+                if (oriSwitchStatus) {
                     syncSwitch.off(task);
                 }
             }
@@ -89,9 +89,9 @@ public class CommonRest {
                 }
                 return adapter.etl(task, paramArray);
             } finally {
-                if (destination != null && oriSwitchStatus != null && oriSwitchStatus) {
+                if (destination != null && oriSwitchStatus) {
                     syncSwitch.on(destination);
-                } else if (destination == null && oriSwitchStatus != null && oriSwitchStatus) {
+                } else if (destination == null && oriSwitchStatus) {
                     syncSwitch.on(task);
                 }
             }
@@ -102,7 +102,7 @@ public class CommonRest {
 
     /**
      * ETL curl http://127.0.0.1:8081/etl/hbase/mytest_person2.yml -X POST
-     * 
+     *
      * @param type 类型 hbase, es
      * @param task 任务名对应配置文件名 mytest_person2.yml
      * @param params etl where条件参数, 为空全部导入
@@ -129,7 +129,7 @@ public class CommonRest {
 
     /**
      * 统计总数 curl http://127.0.0.1:8081/count/hbase/mytest_person2.yml
-     * 
+     *
      * @param type 类型 hbase, es
      * @param task 任务名对应配置文件名 mytest_person2.yml
      * @return
@@ -148,11 +148,11 @@ public class CommonRest {
         Set<String> destinations = adapterCanalConfig.DESTINATIONS;
         for (String destination : destinations) {
             Map<String, String> resMap = new LinkedHashMap<>();
-            Boolean status = syncSwitch.status(destination);
-            String resStatus = "none";
-            if (status != null && status) {
+            boolean status = syncSwitch.status(destination);
+            String resStatus;
+            if (status) {
                 resStatus = "on";
-            } else if (status != null && !status) {
+            } else {
                 resStatus = "off";
             }
             resMap.put("destination", destination);
@@ -164,7 +164,7 @@ public class CommonRest {
 
     /**
      * 实例同步开关 curl http://127.0.0.1:8081/syncSwitch/example/off -X PUT
-     * 
+     *
      * @param destination 实例名称
      * @param status 开关状态: off on
      * @return
@@ -189,17 +189,17 @@ public class CommonRest {
 
     /**
      * 获取实例开关状态 curl http://127.0.0.1:8081/syncSwitch/example
-     * 
+     *
      * @param destination 实例名称
      * @return
      */
     @GetMapping("/syncSwitch/{destination}")
     public Map<String, String> etl(@PathVariable String destination) {
-        Boolean status = syncSwitch.status(destination);
-        String resStatus = "none";
-        if (status != null && status) {
+        boolean status = syncSwitch.status(destination);
+        String resStatus;
+        if (status) {
             resStatus = "on";
-        } else if (status != null && !status) {
+        } else {
             resStatus = "off";
         }
         Map<String, String> res = new LinkedHashMap<>();

+ 1 - 1
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/RdbAdapter.java

@@ -146,7 +146,7 @@ public class RdbAdapter implements OuterAdapter {
             skipDupException);
 
         rdbConfigMonitor = new RdbConfigMonitor();
-        rdbConfigMonitor.init(configuration.getKey(), this);
+        rdbConfigMonitor.init(configuration.getKey(), this, envProperties);
     }
 
     /**

+ 11 - 11
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/MappingConfig.java

@@ -1,10 +1,10 @@
 package com.alibaba.otter.canal.client.adapter.rdb.config;
 
-import org.apache.commons.lang.StringUtils;
-
 import java.util.LinkedHashMap;
 import java.util.Map;
 
+import org.apache.commons.lang.StringUtils;
+
 /**
  * RDB表映射配置
  *
@@ -21,7 +21,7 @@ public class MappingConfig {
 
     private String    outerAdapterKey; // 对应适配器的key
 
-    private Boolean   concurrent;      // 是否并行同步
+    private boolean   concurrent = false;      // 是否并行同步
 
     private DbMapping dbMapping;       // db映射配置
 
@@ -49,11 +49,11 @@ public class MappingConfig {
         this.outerAdapterKey = outerAdapterKey;
     }
 
-    public Boolean getConcurrent() {
-        return concurrent == null ? false : concurrent;
+    public boolean getConcurrent() {
+        return concurrent;
     }
 
-    public void setConcurrent(Boolean concurrent) {
+    public void setConcurrent(boolean concurrent) {
         this.concurrent = concurrent;
     }
 
@@ -87,11 +87,11 @@ public class MappingConfig {
 
     public static class DbMapping {
 
-        private Boolean             mirrorDb    = false;                 // 是否镜像库
+        private boolean             mirrorDb    = false;                 // 是否镜像库
         private String              database;                            // 数据库名或schema名
         private String              table;                               // 表名
         private Map<String, String> targetPk    = new LinkedHashMap<>(); // 目标表主键字段
-        private Boolean             mapAll      = false;                 // 映射所有字段
+        private boolean             mapAll      = false;                 // 映射所有字段
         private String              targetDb;                            // 目标库名
         private String              targetTable;                         // 目标表名
         private Map<String, String> targetColumns;                       // 目标表字段映射
@@ -103,11 +103,11 @@ public class MappingConfig {
 
         private Map<String, String> allMapColumns;
 
-        public Boolean getMirrorDb() {
-            return mirrorDb == null ? false : mirrorDb;
+        public boolean getMirrorDb() {
+            return mirrorDb;
         }
 
-        public void setMirrorDb(Boolean mirrorDb) {
+        public void setMirrorDb(boolean mirrorDb) {
             this.mirrorDb = mirrorDb;
         }
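
With the primitives initialized at their declarations, a mapping yml that omits a key simply keeps the declared default, so the `== null ? false : ...` getters disappear. A small sketch of the binding behavior (SnakeYAML leaves unset bean properties at their declared defaults; the `Cfg` bean is illustrative):

import org.yaml.snakeyaml.Yaml;

// Sketch: omitted keys keep the declared primitive defaults after binding.
public class PrimitiveDefaultDemo {

    public static class Cfg {
        private boolean concurrent = false;
        private boolean mapAll     = false;

        public boolean getConcurrent() { return concurrent; }
        public void setConcurrent(boolean concurrent) { this.concurrent = concurrent; }
        public boolean getMapAll() { return mapAll; }
        public void setMapAll(boolean mapAll) { this.mapAll = mapAll; }
    }

    public static void main(String[] args) {
        Cfg cfg = new Yaml().loadAs("concurrent: true", Cfg.class);
        System.out.println(cfg.getConcurrent()); // true  (bound from the yml)
        System.out.println(cfg.getMapAll());     // false (declared default, no null check)
    }
}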
 

+ 16 - 4
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/monitor/RdbConfigMonitor.java

@@ -3,6 +3,7 @@ package com.alibaba.otter.canal.client.adapter.rdb.monitor;
 import java.io.File;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Properties;
 
 import org.apache.commons.io.filefilter.FileFilterUtils;
 import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
@@ -11,8 +12,8 @@ import org.apache.commons.io.monitor.FileAlterationObserver;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
 
+import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder;
 import com.alibaba.otter.canal.client.adapter.rdb.RdbAdapter;
 import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
 import com.alibaba.otter.canal.client.adapter.rdb.config.MirrorDbConfig;
@@ -29,11 +30,14 @@ public class RdbConfigMonitor {
 
     private RdbAdapter            rdbAdapter;
 
+    private Properties            envProperties;
+
     private FileAlterationMonitor fileMonitor;
 
-    public void init(String key, RdbAdapter rdbAdapter) {
+    public void init(String key, RdbAdapter rdbAdapter, Properties envProperties) {
         this.key = key;
         this.rdbAdapter = rdbAdapter;
+        this.envProperties = envProperties;
         File confDir = Util.getConfDirPath(adapterName);
         try {
             FileAlterationObserver observer = new FileAlterationObserver(confDir,
@@ -64,7 +68,11 @@ public class RdbConfigMonitor {
             try {
                 // 加载新增的配置文件
                 String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName());
-                MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
+                MappingConfig config = YmlConfigBinder
+                    .bindYmlToObj(null, configContent, MappingConfig.class, null, envProperties);
+                if (config == null) {
+                    return;
+                }
                 config.validate();
                 if ((key == null && config.getOuterAdapterKey() == null)
                     || (key != null && key.equals(config.getOuterAdapterKey()))) {
@@ -90,7 +98,11 @@ public class RdbConfigMonitor {
                         onFileDelete(file);
                         return;
                     }
-                    MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
+                    MappingConfig config = YmlConfigBinder
+                        .bindYmlToObj(null, configContent, MappingConfig.class, null, envProperties);
+                    if (config == null) {
+                        return;
+                    }
                     config.validate();
                     if ((key == null && config.getOuterAdapterKey() == null)
                         || (key != null && key.equals(config.getOuterAdapterKey()))) {

+ 7 - 11
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbEtlService.java

@@ -6,8 +6,7 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -78,8 +77,7 @@ public class RdbEtlService {
             if (cnt >= 10000) {
                 int threadCount = 3;
                 long perThreadCnt = cnt / threadCount;
-                ExecutorService executor = Executors.newFixedThreadPool(threadCount);
-                List<Future<Boolean>> futures = new ArrayList<>(threadCount);
+                ExecutorService executor = Util.newFixedThreadPool(threadCount, 5000L);
                 for (int i = 0; i < threadCount; i++) {
                     long offset = i * perThreadCnt;
                     Long size = null;
@@ -92,16 +90,14 @@ public class RdbEtlService {
                     } else {
                         sqlFinal = sql + " LIMIT " + offset + "," + cnt;
                     }
-                    Future<Boolean> future = executor
-                        .submit(() -> executeSqlImport(srcDS, targetDS, sqlFinal, dbMapping, successCount, errMsg));
-                    futures.add(future);
-                }
-
-                for (Future<Boolean> future : futures) {
-                    future.get();
+                    executor
+                        .execute(() -> executeSqlImport(srcDS, targetDS, sqlFinal, dbMapping, successCount, errMsg));
                 }
 
                 executor.shutdown();
+                while (!executor.awaitTermination(3, TimeUnit.SECONDS)) {
+                    // ignore
+                }
             } else {
                 executeSqlImport(srcDS, targetDS, sql.toString(), dbMapping, successCount, errMsg);
             }
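
The parallel import carves `cnt` rows into `LIMIT offset,size` ranges, leaving the last range open-ended so integer-division remainders land on the final worker. The arithmetic in isolation (counts are illustrative):

// Demo of the range-splitting math used by the parallel ETL import.
public class RangeSplitDemo {

    public static void main(String[] args) {
        long cnt = 10500;    // illustrative total (>= 10000 triggers the split)
        int threadCount = 3;
        long perThreadCnt = cnt / threadCount;
        for (int i = 0; i < threadCount; i++) {
            long offset = i * perThreadCnt;
            // The last range uses cnt as its row count (LIMIT offset,cnt), so
            // any remainder from the integer division is swept up there.
            long size = (i == threadCount - 1) ? cnt : perThreadCnt;
            System.out.println("LIMIT " + offset + "," + size);
        }
        // prints: LIMIT 0,3500 / LIMIT 3500,3500 / LIMIT 7000,10500
    }
}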

+ 507 - 507
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java

@@ -1,507 +1,507 @@
-package com.alibaba.otter.canal.client.adapter.rdb.service;
-
-import java.sql.Connection;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.function.Function;
-
-import javax.sql.DataSource;
-
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.fastjson.JSON;
-import com.alibaba.fastjson.serializer.SerializerFeature;
-import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
-import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig.DbMapping;
-import com.alibaba.otter.canal.client.adapter.rdb.support.BatchExecutor;
-import com.alibaba.otter.canal.client.adapter.rdb.support.SingleDml;
-import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil;
-import com.alibaba.otter.canal.client.adapter.support.Dml;
-import com.alibaba.otter.canal.client.adapter.support.Util;
-
-/**
- * RDB同步操作业务
- *
- * @author rewerma 2018-11-7 下午06:45:49
- * @version 1.0.0
- */
-public class RdbSyncService {
-
-    private static final Logger               logger  = LoggerFactory.getLogger(RdbSyncService.class);
-
-    // 源库表字段类型缓存: instance.schema.table -> <columnName, jdbcType>
-    private Map<String, Map<String, Integer>> columnsTypeCache;
-
-    private int                               threads = 3;
-    private boolean                           skipDupException;
-
-    private List<SyncItem>[]                  dmlsPartition;
-    private BatchExecutor[]                   batchExecutors;
-    private ExecutorService[]                 executorThreads;
-
-    public List<SyncItem>[] getDmlsPartition() {
-        return dmlsPartition;
-    }
-
-    public Map<String, Map<String, Integer>> getColumnsTypeCache() {
-        return columnsTypeCache;
-    }
-
-    public RdbSyncService(DataSource dataSource, Integer threads, boolean skipDupException){
-        this(dataSource, threads, new ConcurrentHashMap<>(), skipDupException);
-    }
-
-    @SuppressWarnings("unchecked")
-    public RdbSyncService(DataSource dataSource, Integer threads, Map<String, Map<String, Integer>> columnsTypeCache,
-                          boolean skipDupException){
-        this.columnsTypeCache = columnsTypeCache;
-        this.skipDupException = skipDupException;
-        try {
-            if (threads != null) {
-                this.threads = threads;
-            }
-            this.dmlsPartition = new List[this.threads];
-            this.batchExecutors = new BatchExecutor[this.threads];
-            this.executorThreads = new ExecutorService[this.threads];
-            for (int i = 0; i < this.threads; i++) {
-                dmlsPartition[i] = new ArrayList<>();
-                batchExecutors[i] = new BatchExecutor(dataSource);
-                executorThreads[i] = Executors.newSingleThreadExecutor();
-            }
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * 批量同步回调
-     *
-     * @param dmls 批量 DML
-     * @param function 回调方法
-     */
-    public void sync(List<Dml> dmls, Function<Dml, Boolean> function) {
-        try {
-            boolean toExecute = false;
-            for (Dml dml : dmls) {
-                if (!toExecute) {
-                    toExecute = function.apply(dml);
-                } else {
-                    function.apply(dml);
-                }
-            }
-            if (toExecute) {
-                List<Future<Boolean>> futures = new ArrayList<>();
-                for (int i = 0; i < threads; i++) {
-                    int j = i;
-                    futures.add(executorThreads[i].submit(() -> {
-                        try {
-                            dmlsPartition[j].forEach(syncItem -> sync(batchExecutors[j],
-                                syncItem.config,
-                                syncItem.singleDml));
-                            dmlsPartition[j].clear();
-                            batchExecutors[j].commit();
-                            return true;
-                        } catch (Throwable e) {
-                            batchExecutors[j].rollback();
-                            throw new RuntimeException(e);
-                        }
-                    }));
-                }
-
-                futures.forEach(future -> {
-                    try {
-                        future.get();
-                    } catch (ExecutionException | InterruptedException e) {
-                        throw new RuntimeException(e);
-                    }
-                });
-            }
-        } finally {
-            for (BatchExecutor batchExecutor : batchExecutors) {
-                if (batchExecutor != null) {
-                    batchExecutor.close();
-                }
-            }
-        }
-    }
-
-    /**
-     * 批量同步
-     *
-     * @param mappingConfig 配置集合
-     * @param dmls 批量 DML
-     */
-    public void sync(Map<String, Map<String, MappingConfig>> mappingConfig, List<Dml> dmls, Properties envProperties) {
-        sync(dmls, dml -> {
-            if (dml.getIsDdl() != null && dml.getIsDdl() && StringUtils.isNotEmpty(dml.getSql())) {
-                // DDL
-                columnsTypeCache.remove(dml.getDestination() + "." + dml.getDatabase() + "." + dml.getTable());
-                return false;
-            } else {
-                // DML
-                String destination = StringUtils.trimToEmpty(dml.getDestination());
-                String groupId = StringUtils.trimToEmpty(dml.getGroupId());
-                String database = dml.getDatabase();
-                String table = dml.getTable();
-                Map<String, MappingConfig> configMap;
-                if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) {
-                    configMap = mappingConfig.get(destination + "-" + groupId + "_" + database + "-" + table);
-                } else {
-                    configMap = mappingConfig.get(destination + "_" + database + "-" + table);
-                }
-
-                if (configMap == null) {
-                    return false;
-                }
-
-                if (configMap.values().isEmpty()) {
-                    return false;
-                }
-
-                for (MappingConfig config : configMap.values()) {
-                    if (config.getConcurrent()) {
-                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
-                        singleDmls.forEach(singleDml -> {
-                            int hash = pkHash(config.getDbMapping(), singleDml.getData());
-                            SyncItem syncItem = new SyncItem(config, singleDml);
-                            dmlsPartition[hash].add(syncItem);
-                        });
-                    } else {
-                        int hash = 0;
-                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
-                        singleDmls.forEach(singleDml -> {
-                            SyncItem syncItem = new SyncItem(config, singleDml);
-                            dmlsPartition[hash].add(syncItem);
-                        });
-                    }
-                }
-                return true;
-            }
-        });
-    }
-
-    /**
-     * 单条 dml 同步
-     *
-     * @param batchExecutor 批量事务执行器
-     * @param config 对应配置对象
-     * @param dml DML
-     */
-    public void sync(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) {
-        if (config != null) {
-            try {
-                String type = dml.getType();
-                if (type != null && type.equalsIgnoreCase("INSERT")) {
-                    insert(batchExecutor, config, dml);
-                } else if (type != null && type.equalsIgnoreCase("UPDATE")) {
-                    update(batchExecutor, config, dml);
-                } else if (type != null && type.equalsIgnoreCase("DELETE")) {
-                    delete(batchExecutor, config, dml);
-                } else if (type != null && type.equalsIgnoreCase("TRUNCATE")) {
-                    truncate(batchExecutor, config);
-                }
-                if (logger.isDebugEnabled()) {
-                    logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue));
-                }
-            } catch (SQLException e) {
-                throw new RuntimeException(e);
-            }
-        }
-    }
-
-    /**
-     * 插入操作
-     *
-     * @param config 配置项
-     * @param dml DML数据
-     */
-    private void insert(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
-        Map<String, Object> data = dml.getData();
-        if (data == null || data.isEmpty()) {
-            return;
-        }
-
-        DbMapping dbMapping = config.getDbMapping();
-
-        Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
-
-        StringBuilder insertSql = new StringBuilder();
-        insertSql.append("INSERT INTO ").append(SyncUtil.getDbTableName(dbMapping)).append(" (");
-
-        columnsMap.forEach((targetColumnName, srcColumnName) -> insertSql.append(targetColumnName).append(","));
-        int len = insertSql.length();
-        insertSql.delete(len - 1, len).append(") VALUES (");
-        int mapLen = columnsMap.size();
-        for (int i = 0; i < mapLen; i++) {
-            insertSql.append("?,");
-        }
-        len = insertSql.length();
-        insertSql.delete(len - 1, len).append(")");
-
-        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
-
-        List<Map<String, ?>> values = new ArrayList<>();
-        for (Map.Entry<String, String> entry : columnsMap.entrySet()) {
-            String targetColumnName = entry.getKey();
-            String srcColumnName = entry.getValue();
-            if (srcColumnName == null) {
-                srcColumnName = Util.cleanColumn(targetColumnName);
-            }
-
-            Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
-            if (type == null) {
-                throw new RuntimeException("Target column: " + targetColumnName + " not matched");
-            }
-            Object value = data.get(srcColumnName);
-            BatchExecutor.setValue(values, type, value);
-        }
-
-        try {
-            batchExecutor.execute(insertSql.toString(), values);
-        } catch (SQLException e) {
-            if (skipDupException
-                && (e.getMessage().contains("Duplicate entry") || e.getMessage().startsWith("ORA-00001: 违反唯一约束条件"))) {
-                // ignore
-                // TODO 增加更多关系数据库的主键冲突的错误码
-            } else {
-                throw e;
-            }
-        }
-        if (logger.isTraceEnabled()) {
-            logger.trace("Insert into target table, sql: {}", insertSql);
-        }
-
-    }
-
-    /**
-     * 更新操作
-     *
-     * @param config 配置项
-     * @param dml DML数据
-     */
-    private void update(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
-        Map<String, Object> data = dml.getData();
-        if (data == null || data.isEmpty()) {
-            return;
-        }
-
-        Map<String, Object> old = dml.getOld();
-        if (old == null || old.isEmpty()) {
-            return;
-        }
-
-        DbMapping dbMapping = config.getDbMapping();
-
-        Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
-
-        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
-
-        StringBuilder updateSql = new StringBuilder();
-        updateSql.append("UPDATE ").append(SyncUtil.getDbTableName(dbMapping)).append(" SET ");
-        List<Map<String, ?>> values = new ArrayList<>();
-        boolean hasMatched = false;
-        for (String srcColumnName : old.keySet()) {
-            List<String> targetColumnNames = new ArrayList<>();
-            columnsMap.forEach((targetColumn, srcColumn) -> {
-                if (srcColumnName.equalsIgnoreCase(srcColumn)) {
-                    targetColumnNames.add(targetColumn);
-                }
-            });
-            if (!targetColumnNames.isEmpty()) {
-                hasMatched = true;
-                for (String targetColumnName : targetColumnNames) {
-                    updateSql.append(targetColumnName).append("=?, ");
-                    Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
-                    if (type == null) {
-                        throw new RuntimeException("Target column: " + targetColumnName + " not matched");
-                    }
-                    BatchExecutor.setValue(values, type, data.get(srcColumnName));
-                }
-            }
-        }
-        if (!hasMatched) {
-            logger.warn("Did not match any columns to update");
-            return;
-        }
-        int len = updateSql.length();
-        updateSql.delete(len - 2, len).append(" WHERE ");
-
-        // 拼接主键
-        appendCondition(dbMapping, updateSql, ctype, values, data, old);
-        batchExecutor.execute(updateSql.toString(), values);
-        if (logger.isTraceEnabled()) {
-            logger.trace("Update target table, sql: {}", updateSql);
-        }
-    }
-
-    /**
-     * 删除操作
-     *
-     * @param config
-     * @param dml
-     */
-    private void delete(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
-        Map<String, Object> data = dml.getData();
-        if (data == null || data.isEmpty()) {
-            return;
-        }
-
-        DbMapping dbMapping = config.getDbMapping();
-
-        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
-
-        StringBuilder sql = new StringBuilder();
-        sql.append("DELETE FROM ").append(SyncUtil.getDbTableName(dbMapping)).append(" WHERE ");
-
-        List<Map<String, ?>> values = new ArrayList<>();
-        // 拼接主键
-        appendCondition(dbMapping, sql, ctype, values, data);
-        batchExecutor.execute(sql.toString(), values);
-        if (logger.isTraceEnabled()) {
-            logger.trace("Delete from target table, sql: {}", sql);
-        }
-    }
-
-    /**
-     * truncate操作
-     *
-     * @param config
-     */
-    private void truncate(BatchExecutor batchExecutor, MappingConfig config) throws SQLException {
-        DbMapping dbMapping = config.getDbMapping();
-        StringBuilder sql = new StringBuilder();
-        sql.append("TRUNCATE TABLE ").append(SyncUtil.getDbTableName(dbMapping));
-        batchExecutor.execute(sql.toString(), new ArrayList<>());
-        if (logger.isTraceEnabled()) {
-            logger.trace("Truncate target table, sql: {}", sql);
-        }
-    }
-
-    /**
-     * 获取目标字段类型
-     *
-     * @param conn sql connection
-     * @param config 映射配置
-     * @return 字段sqlType
-     */
-    private Map<String, Integer> getTargetColumnType(Connection conn, MappingConfig config) {
-        DbMapping dbMapping = config.getDbMapping();
-        String cacheKey = config.getDestination() + "." + dbMapping.getDatabase() + "." + dbMapping.getTable();
-        Map<String, Integer> columnType = columnsTypeCache.get(cacheKey);
-        if (columnType == null) {
-            synchronized (RdbSyncService.class) {
-                columnType = columnsTypeCache.get(cacheKey);
-                if (columnType == null) {
-                    columnType = new LinkedHashMap<>();
-                    final Map<String, Integer> columnTypeTmp = columnType;
-                    String sql = "SELECT * FROM " + SyncUtil.getDbTableName(dbMapping) + " WHERE 1=2";
-                    Util.sqlRS(conn, sql, rs -> {
-                        try {
-                            ResultSetMetaData rsd = rs.getMetaData();
-                            int columnCount = rsd.getColumnCount();
-                            for (int i = 1; i <= columnCount; i++) {
-                                columnTypeTmp.put(rsd.getColumnName(i).toLowerCase(), rsd.getColumnType(i));
-                            }
-                            columnsTypeCache.put(cacheKey, columnTypeTmp);
-                        } catch (SQLException e) {
-                            logger.error(e.getMessage(), e);
-                        }
-                    });
-                }
-            }
-        }
-        return columnType;
-    }
-
-    /**
-     * 拼接主键 where条件
-     */
-    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
-                                 List<Map<String, ?>> values, Map<String, Object> d) {
-        appendCondition(dbMapping, sql, ctype, values, d, null);
-    }
-
-    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
-                                 List<Map<String, ?>> values, Map<String, Object> d, Map<String, Object> o) {
-        // 拼接主键
-        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
-            String targetColumnName = entry.getKey();
-            String srcColumnName = entry.getValue();
-            if (srcColumnName == null) {
-                srcColumnName = Util.cleanColumn(targetColumnName);
-            }
-            sql.append(targetColumnName).append("=? AND ");
-            Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
-            if (type == null) {
-                throw new RuntimeException("Target column: " + targetColumnName + " not matched");
-            }
-            // 如果有修改主键的情况
-            if (o != null && o.containsKey(srcColumnName)) {
-                BatchExecutor.setValue(values, type, o.get(srcColumnName));
-            } else {
-                BatchExecutor.setValue(values, type, d.get(srcColumnName));
-            }
-        }
-        int len = sql.length();
-        sql.delete(len - 4, len);
-    }
-
-    public static class SyncItem {
-
-        private MappingConfig config;
-        private SingleDml     singleDml;
-
-        public SyncItem(MappingConfig config, SingleDml singleDml){
-            this.config = config;
-            this.singleDml = singleDml;
-        }
-    }
-
-    /**
-     * 取主键hash
-     */
-    public int pkHash(DbMapping dbMapping, Map<String, Object> d) {
-        return pkHash(dbMapping, d, null);
-    }
-
-    public int pkHash(DbMapping dbMapping, Map<String, Object> d, Map<String, Object> o) {
-        int hash = 0;
-        // 取主键
-        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
-            String targetColumnName = entry.getKey();
-            String srcColumnName = entry.getValue();
-            if (srcColumnName == null) {
-                srcColumnName = Util.cleanColumn(targetColumnName);
-            }
-            Object value = null;
-            if (o != null && o.containsKey(srcColumnName)) {
-                value = o.get(srcColumnName);
-            } else if (d != null) {
-                value = d.get(srcColumnName);
-            }
-            if (value != null) {
-                hash += value.hashCode();
-            }
-        }
-        hash = Math.abs(hash) % threads;
-        return Math.abs(hash);
-    }
-
-    public void close() {
-        for (int i = 0; i < threads; i++) {
-            executorThreads[i].shutdown();
-        }
-    }
-}
+package com.alibaba.otter.canal.client.adapter.rdb.service;
+
+import java.sql.Connection;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.function.Function;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.serializer.SerializerFeature;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig.DbMapping;
+import com.alibaba.otter.canal.client.adapter.rdb.support.BatchExecutor;
+import com.alibaba.otter.canal.client.adapter.rdb.support.SingleDml;
+import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+
+/**
+ * RDB同步操作业务
+ *
+ * @author rewerma 2018-11-7 下午06:45:49
+ * @version 1.0.0
+ */
+public class RdbSyncService {
+
+    private static final Logger               logger  = LoggerFactory.getLogger(RdbSyncService.class);
+
+    // 源库表字段类型缓存: instance.schema.table -> <columnName, jdbcType>
+    private Map<String, Map<String, Integer>> columnsTypeCache;
+
+    private int                               threads = 3;
+    private boolean                           skipDupException;
+
+    private List<SyncItem>[]                  dmlsPartition;
+    private BatchExecutor[]                   batchExecutors;
+    private ExecutorService[]                 executorThreads;
+
+    public List<SyncItem>[] getDmlsPartition() {
+        return dmlsPartition;
+    }
+
+    public Map<String, Map<String, Integer>> getColumnsTypeCache() {
+        return columnsTypeCache;
+    }
+
+    public RdbSyncService(DataSource dataSource, Integer threads, boolean skipDupException){
+        this(dataSource, threads, new ConcurrentHashMap<>(), skipDupException);
+    }
+
+    @SuppressWarnings("unchecked")
+    public RdbSyncService(DataSource dataSource, Integer threads, Map<String, Map<String, Integer>> columnsTypeCache,
+                          boolean skipDupException){
+        this.columnsTypeCache = columnsTypeCache;
+        this.skipDupException = skipDupException;
+        try {
+            if (threads != null) {
+                this.threads = threads;
+            }
+            this.dmlsPartition = new List[this.threads];
+            this.batchExecutors = new BatchExecutor[this.threads];
+            this.executorThreads = new ExecutorService[this.threads];
+            for (int i = 0; i < this.threads; i++) {
+                dmlsPartition[i] = new ArrayList<>();
+                batchExecutors[i] = new BatchExecutor(dataSource);
+                executorThreads[i] = Executors.newSingleThreadExecutor();
+            }
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * 批量同步回调
+     *
+     * @param dmls 批量 DML
+     * @param function 回调方法
+     */
+    public void sync(List<Dml> dmls, Function<Dml, Boolean> function) {
+        try {
+            boolean toExecute = false;
+            for (Dml dml : dmls) {
+                if (!toExecute) {
+                    toExecute = function.apply(dml);
+                } else {
+                    function.apply(dml);
+                }
+            }
+            if (toExecute) {
+                List<Future<Boolean>> futures = new ArrayList<>();
+                for (int i = 0; i < threads; i++) {
+                    int j = i;
+                    futures.add(executorThreads[i].submit(() -> {
+                        try {
+                            dmlsPartition[j].forEach(syncItem -> sync(batchExecutors[j],
+                                syncItem.config,
+                                syncItem.singleDml));
+                            dmlsPartition[j].clear();
+                            batchExecutors[j].commit();
+                            return true;
+                        } catch (Throwable e) {
+                            batchExecutors[j].rollback();
+                            throw new RuntimeException(e);
+                        }
+                    }));
+                }
+
+                futures.forEach(future -> {
+                    try {
+                        future.get();
+                    } catch (ExecutionException | InterruptedException e) {
+                        throw new RuntimeException(e);
+                    }
+                });
+            }
+        } finally {
+            for (BatchExecutor batchExecutor : batchExecutors) {
+                if (batchExecutor != null) {
+                    batchExecutor.close();
+                }
+            }
+        }
+    }
+
+    /**
+     * 批量同步
+     *
+     * @param mappingConfig 配置集合
+     * @param dmls 批量 DML
+     */
+    public void sync(Map<String, Map<String, MappingConfig>> mappingConfig, List<Dml> dmls, Properties envProperties) {
+        sync(dmls, dml -> {
+            if (dml.getIsDdl() != null && dml.getIsDdl() && StringUtils.isNotEmpty(dml.getSql())) {
+                // DDL
+                columnsTypeCache.remove(dml.getDestination() + "." + dml.getDatabase() + "." + dml.getTable());
+                return false;
+            } else {
+                // DML
+                String destination = StringUtils.trimToEmpty(dml.getDestination());
+                String groupId = StringUtils.trimToEmpty(dml.getGroupId());
+                String database = dml.getDatabase();
+                String table = dml.getTable();
+                Map<String, MappingConfig> configMap;
+                if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) {
+                    configMap = mappingConfig.get(destination + "-" + groupId + "_" + database + "-" + table);
+                } else {
+                    configMap = mappingConfig.get(destination + "_" + database + "-" + table);
+                }
+
+                if (configMap == null) {
+                    return false;
+                }
+
+                if (configMap.values().isEmpty()) {
+                    return false;
+                }
+
+                for (MappingConfig config : configMap.values()) {
+                    if (config.getConcurrent()) {
+                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
+                        singleDmls.forEach(singleDml -> {
+                            int hash = pkHash(config.getDbMapping(), singleDml.getData());
+                            SyncItem syncItem = new SyncItem(config, singleDml);
+                            dmlsPartition[hash].add(syncItem);
+                        });
+                    } else {
+                        int hash = 0;
+                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
+                        singleDmls.forEach(singleDml -> {
+                            SyncItem syncItem = new SyncItem(config, singleDml);
+                            dmlsPartition[hash].add(syncItem);
+                        });
+                    }
+                }
+                return true;
+            }
+        });
+    }
+
+    /**
+     * 单条 dml 同步
+     *
+     * @param batchExecutor 批量事务执行器
+     * @param config 对应配置对象
+     * @param dml DML
+     */
+    public void sync(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) {
+        if (config != null) {
+            try {
+                String type = dml.getType();
+                if (type != null && type.equalsIgnoreCase("INSERT")) {
+                    insert(batchExecutor, config, dml);
+                } else if (type != null && type.equalsIgnoreCase("UPDATE")) {
+                    update(batchExecutor, config, dml);
+                } else if (type != null && type.equalsIgnoreCase("DELETE")) {
+                    delete(batchExecutor, config, dml);
+                } else if (type != null && type.equalsIgnoreCase("TRUNCATE")) {
+                    truncate(batchExecutor, config);
+                }
+                if (logger.isDebugEnabled()) {
+                    logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue));
+                }
+            } catch (SQLException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+
+    /**
+     * 插入操作
+     *
+     * @param config 配置项
+     * @param dml DML数据
+     */
+    private void insert(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
+        Map<String, Object> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        DbMapping dbMapping = config.getDbMapping();
+
+        Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
+
+        StringBuilder insertSql = new StringBuilder();
+        insertSql.append("INSERT INTO ").append(SyncUtil.getDbTableName(dbMapping)).append(" (");
+
+        columnsMap.forEach((targetColumnName, srcColumnName) -> insertSql.append(targetColumnName).append(","));
+        int len = insertSql.length();
+        insertSql.delete(len - 1, len).append(") VALUES (");
+        int mapLen = columnsMap.size();
+        for (int i = 0; i < mapLen; i++) {
+            insertSql.append("?,");
+        }
+        len = insertSql.length();
+        insertSql.delete(len - 1, len).append(")");
+
+        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
+
+        List<Map<String, ?>> values = new ArrayList<>();
+        for (Map.Entry<String, String> entry : columnsMap.entrySet()) {
+            String targetColumnName = entry.getKey();
+            String srcColumnName = entry.getValue();
+            if (srcColumnName == null) {
+                srcColumnName = Util.cleanColumn(targetColumnName);
+            }
+
+            Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
+            if (type == null) {
+                throw new RuntimeException("Target column: " + targetColumnName + " not matched");
+            }
+            Object value = data.get(srcColumnName);
+            BatchExecutor.setValue(values, type, value);
+        }
+
+        try {
+            batchExecutor.execute(insertSql.toString(), values);
+        } catch (SQLException e) {
+            if (skipDupException
+                && (e.getMessage().contains("Duplicate entry") || e.getMessage().startsWith("ORA-00001: 违反唯一约束条件"))) {
+                // ignore
+                // TODO 增加更多关系数据库的主键冲突的错误码
+            } else {
+                throw e;
+            }
+        }
+        if (logger.isTraceEnabled()) {
+            logger.trace("Insert into target table, sql: {}", insertSql);
+        }
+
+    }
+
+    /**
+     * 更新操作
+     *
+     * @param config 配置项
+     * @param dml DML数据
+     */
+    private void update(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
+        Map<String, Object> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        Map<String, Object> old = dml.getOld();
+        if (old == null || old.isEmpty()) {
+            return;
+        }
+
+        DbMapping dbMapping = config.getDbMapping();
+
+        Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
+
+        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
+
+        StringBuilder updateSql = new StringBuilder();
+        updateSql.append("UPDATE ").append(SyncUtil.getDbTableName(dbMapping)).append(" SET ");
+        List<Map<String, ?>> values = new ArrayList<>();
+        boolean hasMatched = false;
+        for (String srcColumnName : old.keySet()) {
+            List<String> targetColumnNames = new ArrayList<>();
+            columnsMap.forEach((targetColumn, srcColumn) -> {
+                if (srcColumnName.equalsIgnoreCase(srcColumn)) {
+                    targetColumnNames.add(targetColumn);
+                }
+            });
+            if (!targetColumnNames.isEmpty()) {
+                hasMatched = true;
+                for (String targetColumnName : targetColumnNames) {
+                    updateSql.append(targetColumnName).append("=?, ");
+                    Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
+                    if (type == null) {
+                        throw new RuntimeException("Target column: " + targetColumnName + " not matched");
+                    }
+                    BatchExecutor.setValue(values, type, data.get(srcColumnName));
+                }
+            }
+        }
+        if (!hasMatched) {
+            logger.warn("Did not match any columns to update");
+            return;
+        }
+        int len = updateSql.length();
+        updateSql.delete(len - 2, len).append(" WHERE ");
+
+        // append the primary-key WHERE condition
+        appendCondition(dbMapping, updateSql, ctype, values, data, old);
+        batchExecutor.execute(updateSql.toString(), values);
+        if (logger.isTraceEnabled()) {
+            logger.trace("Update target table, sql: {}", updateSql);
+        }
+    }
+
+    /**
+     * Delete operation
+     *
+     * @param batchExecutor batch executor bound to the target connection
+     * @param config mapping config
+     * @param dml single-row DML data
+     */
+    private void delete(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) throws SQLException {
+        Map<String, Object> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        DbMapping dbMapping = config.getDbMapping();
+
+        Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
+
+        StringBuilder sql = new StringBuilder();
+        sql.append("DELETE FROM ").append(SyncUtil.getDbTableName(dbMapping)).append(" WHERE ");
+
+        List<Map<String, ?>> values = new ArrayList<>();
+        // append the primary-key WHERE condition
+        appendCondition(dbMapping, sql, ctype, values, data);
+        batchExecutor.execute(sql.toString(), values);
+        if (logger.isTraceEnabled()) {
+            logger.trace("Delete from target table, sql: {}", sql);
+        }
+    }
+
+    /**
+     * Truncate operation
+     *
+     * @param batchExecutor batch executor bound to the target connection
+     * @param config mapping config
+     */
+    private void truncate(BatchExecutor batchExecutor, MappingConfig config) throws SQLException {
+        DbMapping dbMapping = config.getDbMapping();
+        StringBuilder sql = new StringBuilder();
+        sql.append("TRUNCATE TABLE ").append(SyncUtil.getDbTableName(dbMapping));
+        batchExecutor.execute(sql.toString(), new ArrayList<>());
+        if (logger.isTraceEnabled()) {
+            logger.trace("Truncate target table, sql: {}", sql);
+        }
+    }
+
+    /**
+     * Get the SQL types of the target table's columns
+     *
+     * @param conn SQL connection to the target database
+     * @param config mapping config
+     * @return map of lower-cased column name to java.sql.Types constant
+     */
+    private Map<String, Integer> getTargetColumnType(Connection conn, MappingConfig config) {
+        DbMapping dbMapping = config.getDbMapping();
+        String cacheKey = config.getDestination() + "." + dbMapping.getDatabase() + "." + dbMapping.getTable();
+        Map<String, Integer> columnType = columnsTypeCache.get(cacheKey);
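+        // double-checked locking: build and cache the column-type map only once
+        // per destination.database.table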
+        if (columnType == null) {
+            synchronized (RdbSyncService.class) {
+                columnType = columnsTypeCache.get(cacheKey);
+                if (columnType == null) {
+                    columnType = new LinkedHashMap<>();
+                    final Map<String, Integer> columnTypeTmp = columnType;
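+                    // "WHERE 1=2" returns no rows but still exposes the table's
+                    // ResultSetMetaData, which is all that is needed here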
+                    String sql = "SELECT * FROM " + SyncUtil.getDbTableName(dbMapping) + " WHERE 1=2";
+                    Util.sqlRS(conn, sql, rs -> {
+                        try {
+                            ResultSetMetaData rsd = rs.getMetaData();
+                            int columnCount = rsd.getColumnCount();
+                            for (int i = 1; i <= columnCount; i++) {
+                                columnTypeTmp.put(rsd.getColumnName(i).toLowerCase(), rsd.getColumnType(i));
+                            }
+                            columnsTypeCache.put(cacheKey, columnTypeTmp);
+                        } catch (SQLException e) {
+                            logger.error(e.getMessage(), e);
+                        }
+                    });
+                }
+            }
+        }
+        return columnType;
+    }
+
+    /**
+     * Append the primary-key WHERE condition
+     */
+    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
+                                 List<Map<String, ?>> values, Map<String, Object> d) {
+        appendCondition(dbMapping, sql, ctype, values, d, null);
+    }
+
+    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
+                                 List<Map<String, ?>> values, Map<String, Object> d, Map<String, Object> o) {
+        // iterate the target table's primary keys
+        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
+            String targetColumnName = entry.getKey();
+            String srcColumnName = entry.getValue();
+            if (srcColumnName == null) {
+                srcColumnName = Util.cleanColumn(targetColumnName);
+            }
+            sql.append(targetColumnName).append("=? AND ");
+            Integer type = ctype.get(Util.cleanColumn(targetColumnName).toLowerCase());
+            if (type == null) {
+                throw new RuntimeException("Target column: " + targetColumnName + " not matched");
+            }
+            // if the primary key itself was changed, locate the row by its old value
+            if (o != null && o.containsKey(srcColumnName)) {
+                BatchExecutor.setValue(values, type, o.get(srcColumnName));
+            } else {
+                BatchExecutor.setValue(values, type, d.get(srcColumnName));
+            }
+        }
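+        // strip the trailing " AND "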
+        int len = sql.length();
+        sql.delete(len - 4, len);
+    }
+
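+    /**
+     * Pairs a mapping config with a single-row DML for sharded processing
+     */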
+    public static class SyncItem {
+
+        private MappingConfig config;
+        private SingleDml     singleDml;
+
+        public SyncItem(MappingConfig config, SingleDml singleDml){
+            this.config = config;
+            this.singleDml = singleDml;
+        }
+    }
+
+    /**
+     * Hash of the primary-key values; the result (already reduced modulo the
+     * thread count) selects the worker thread, so all events for one row are
+     * handled by the same thread in order
+     */
+    public int pkHash(DbMapping dbMapping, Map<String, Object> d) {
+        return pkHash(dbMapping, d, null);
+    }
+
+    public int pkHash(DbMapping dbMapping, Map<String, Object> d, Map<String, Object> o) {
+        int hash = 0;
+        // accumulate the hash over all primary-key values
+        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
+            String targetColumnName = entry.getKey();
+            String srcColumnName = entry.getValue();
+            if (srcColumnName == null) {
+                srcColumnName = Util.cleanColumn(targetColumnName);
+            }
+            Object value = null;
+            if (o != null && o.containsKey(srcColumnName)) {
+                value = o.get(srcColumnName);
+            } else if (d != null) {
+                value = d.get(srcColumnName);
+            }
+            if (value != null) {
+                hash += value.hashCode();
+            }
+        }
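+        // Math.abs is applied twice: Math.abs(Integer.MIN_VALUE) is still negative,
+        // so take abs again after the modulo has narrowed the range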
+        hash = Math.abs(hash) % threads;
+        return Math.abs(hash);
+    }
+
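+    /**
+     * Shut down all worker threads
+     */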
+    public void close() {
+        for (int i = 0; i < threads; i++) {
+            executorThreads[i].shutdown();
+        }
+    }
+}

+ 3 - 4
client-adapter/rdb/src/test/java/com/alibaba/otter/canal/client/adapter/rdb/test/DBTest.java

@@ -92,17 +92,16 @@ public class DBTest {
 
     private String clob2Str(Clob clob) {
         String content = "";
-        try {
-            Reader is = clob.getCharacterStream();
-            BufferedReader buff = new BufferedReader(is);
+        try (Reader is = clob.getCharacterStream(); BufferedReader buff = new BufferedReader(is)) {
             String line = buff.readLine();
-            StringBuffer sb = new StringBuffer();
+            StringBuilder sb = new StringBuilder();
             while (line != null) {
                 sb.append(line);
                 line = buff.readLine();
             }
             content = sb.toString();
         } catch (Exception e) {
+            e.printStackTrace();
         }
         return content;
     }

+ 1 - 1
client-adapter/rdb/src/test/java/com/alibaba/otter/canal/client/adapter/rdb/test/TestConstant.java

@@ -10,7 +10,7 @@ public class TestConstant {
     public final static String    jdbcUser     = "root";
     public final static String    jdbcPassword = "121212";
 
-    public static DruidDataSource dataSource;
+    public final static DruidDataSource dataSource;
 
     static {
         dataSource = new DruidDataSource();