Forráskód Böngészése

Merge pull request #2 from alibaba/master

merge master
DinoZhang 6 éve
szülő
commit
67cd0e5fca
100 módosított fájl, 13980 hozzáadás és 27 törlés
  1. 2 0
      .gitignore
  2. 53 16
      README.md
  3. 545 0
      client-adapter/README.md
  4. 36 0
      client-adapter/common/pom.xml
  5. 68 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/OuterAdapter.java
  6. 192 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/CanalClientConfig.java
  7. 6 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Constant.java
  8. 81 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/DatasourceConfig.java
  9. 118 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Dml.java
  10. 44 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/EtlResult.java
  11. 421 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/ExtensionLoader.java
  12. 149 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/JdbcTypeUtil.java
  13. 71 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/MappingConfigsLoader.java
  14. 181 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/MessageUtil.java
  15. 62 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/OuterAdapterConfig.java
  16. 57 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Result.java
  17. 22 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/SPI.java
  18. 88 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/URLClassExtensionLoader.java
  19. 120 0
      client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Util.java
  20. 95 0
      client-adapter/elasticsearch/pom.xml
  21. 227 0
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/ESAdapter.java
  22. 194 0
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/ESSyncConfig.java
  23. 41 0
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/ESSyncConfigLoader.java
  24. 422 0
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/SchemaItem.java
  25. 210 0
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/SqlParser.java
  26. 151 0
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/monitor/ESConfigMonitor.java
  27. 286 0
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/service/ESEtlService.java
  28. 862 0
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/service/ESSyncService.java
  29. 335 0
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESSyncUtil.java
  30. 526 0
      client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESTemplate.java
  31. 1 0
      client-adapter/elasticsearch/src/main/resources/META-INF/canal/com.alibaba.otter.canal.client.adapter.OuterAdapter
  32. 16 0
      client-adapter/elasticsearch/src/main/resources/es/mytest_user.yml
  33. 38 0
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/ConfigLoadTest.java
  34. 47 0
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/SqlParseTest.java
  35. 40 0
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/TestConstant.java
  36. 68 0
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/Common.java
  37. 130 0
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/LabelSyncJoinSub2Test.java
  38. 130 0
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/LabelSyncJoinSubTest.java
  39. 95 0
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/RoleSyncJoinOne2Test.java
  40. 191 0
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/RoleSyncJoinOneTest.java
  41. 96 0
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/UserSyncJoinOneTest.java
  42. 122 0
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/UserSyncSingleTest.java
  43. 39 0
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/db_schema.sql
  44. 21 0
      client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/es_mapping.json
  45. 8 0
      client-adapter/elasticsearch/src/test/resources/es/mytest_user_single.yml_
  46. 13 0
      client-adapter/elasticsearch/src/test/resources/log4j2-test.xml
  47. 1 7
      client-adapter/elasticsearch/src/test/resources/logback-test.xml
  48. 88 0
      client-adapter/hbase/pom.xml
  49. 213 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/HbaseAdapter.java
  50. 394 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/config/MappingConfig.java
  51. 46 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/config/MappingConfigLoader.java
  52. 129 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/monitor/HbaseConfigMonitor.java
  53. 392 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseEtlService.java
  54. 459 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseSyncService.java
  55. 84 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/HRow.java
  56. 163 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/HbaseTemplate.java
  57. 73 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/PhType.java
  58. 620 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/PhTypeUtil.java
  59. 67 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/Type.java
  60. 189 0
      client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/TypeUtil.java
  61. 1 0
      client-adapter/hbase/src/main/resources/META-INF/canal/com.alibaba.otter.canal.client.adapter.OuterAdapter
  62. 60 0
      client-adapter/hbase/src/main/resources/hbase/mytest_person2.yml
  63. 261 0
      client-adapter/launcher/pom.xml
  64. 69 0
      client-adapter/launcher/src/main/assembly/dev.xml
  65. 74 0
      client-adapter/launcher/src/main/assembly/release.xml
  66. 5 0
      client-adapter/launcher/src/main/bin/restart.sh
  67. 22 0
      client-adapter/launcher/src/main/bin/startup.bat
  68. 67 0
      client-adapter/launcher/src/main/bin/startup.sh
  69. 4 4
      client-adapter/launcher/src/main/bin/stop.sh
  70. 21 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/CanalAdapterApplication.java
  71. 120 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/common/EtlLock.java
  72. 6 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/common/Mode.java
  73. 214 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/common/SyncSwitch.java
  74. 79 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/config/AdapterCanalConfig.java
  75. 42 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/config/CuratorClient.java
  76. 34 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/config/SpringContext.java
  77. 243 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/AbstractCanalAdapterWorker.java
  78. 79 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterKafkaWorker.java
  79. 198 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterLoader.java
  80. 74 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterRocketMQWorker.java
  81. 85 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterService.java
  82. 191 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterWorker.java
  83. 90 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/ApplicationConfigMonitor.java
  84. 209 0
      client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/rest/CommonRest.java
  85. 62 0
      client-adapter/launcher/src/main/resources/application.yml
  86. 46 0
      client-adapter/logger/pom.xml
  87. 46 0
      client-adapter/logger/src/main/java/com/alibaba/otter/canal/client/adapter/logger/LoggerAdapterExample.java
  88. 1 0
      client-adapter/logger/src/main/resources/META-INF/canal/com.alibaba.otter.canal.client.adapter.OuterAdapter
  89. 128 0
      client-adapter/pom.xml
  90. 97 0
      client-adapter/rdb/pom.xml
  91. 216 0
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/RdbAdapter.java
  92. 46 0
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/ConfigLoader.java
  93. 182 0
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/MappingConfig.java
  94. 141 0
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/monitor/RdbConfigMonitor.java
  95. 293 0
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbEtlService.java
  96. 409 0
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java
  97. 91 0
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/support/BatchExecutor.java
  98. 83 0
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/support/SingleDml.java
  99. 252 0
      client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/support/SyncUtil.java
  100. 1 0
      client-adapter/rdb/src/main/resources/META-INF/canal/com.alibaba.otter.canal.client.adapter.OuterAdapter

+ 2 - 0
.gitignore

@@ -14,3 +14,5 @@ jtester.properties
 .idea/
 *.iml
 .DS_Store
+*.tar.gz
+*.rpm

+ 53 - 16
README.md

@@ -1,12 +1,6 @@
 <div class="blog_content">
     <div class="iteye-blog-content-contain">
-
-<h3>最新更新</h3>
-<ol>
-<li>canal QQ讨论群已经建立,群号:161559791 ,欢迎加入进行技术讨论。</li>
-<li>canal消费端项目开源: Otter(分布式数据库同步系统),地址:<a href="https://github.com/alibaba/otter">https://github.com/alibaba/otter</a></li>
-</ol>
-
+    	
 <h1>背景</h1>
 <p style="font-size: 14px;">   早期,阿里巴巴B2B公司因为存在杭州和美国双机房部署,存在跨机房同步的业务需求。不过早期的数据库同步业务,主要是基于trigger的方式获取增量变更,不过从2010年开始,阿里系公司开始逐步的尝试基于数据库的日志解析,获取增量变更进行同步,由此衍生出了增量订阅&amp;消费的业务,从此开启了一段新纪元。</p>
 <p style="font-size: 14px;">   ps. 目前内部版本已经支持mysql和oracle部分版本的日志解析,当前的canal开源版本支持5.7及以下的版本(阿里内部mysql 5.7.13, 5.6.10, mysql 5.5.18和5.1.40/48)</p>
@@ -44,6 +38,16 @@
 <li>canal解析binary log对象(原始为byte流)</li>
 </ol>
 
+<h1>重要版本更新说明</h1>
+
+canal 1.1.x系列,参考release文档:<a href="https://github.com/alibaba/canal/releases">版本发布信息</a>
+
+1. 整体性能测试&优化,提升了150%. #726 参考: 【[Performance](https://github.com/alibaba/canal/wiki/Performance)】
+2. 原生支持prometheus监控 #765 【[Prometheus QuickStart](https://github.com/alibaba/canal/wiki/Prometheus-QuickStart)】
+3. 原生支持kafka消息投递 #695 【[Canal Kafka/RocketMQ QuickStart](https://github.com/alibaba/canal/wiki/Canal-Kafka-RocketMQ-QuickStart)】
+4. 原生支持aliyun rds的binlog订阅 (解决自动主备切换/oss binlog离线解析) 参考: 【[Aliyun RDS QuickStart](https://github.com/alibaba/canal/wiki/aliyun-RDS-QuickStart)】
+5. 原生支持docker镜像 #801 参考:  【[Docker QuickStart](https://github.com/alibaba/canal/wiki/Docker-QuickStart)】
+
 <h1>相关文档</h1>
 
 See the wiki page for : <a href="https://github.com/alibaba/canal/wiki" >wiki文档</a>
@@ -52,20 +56,41 @@ See the wiki page for : <a href="https://github.com/alibaba/canal/wiki" >wiki文
 <ul>
 <li><a class="internal present" href="https://github.com/alibaba/canal/wiki/Home">Home</a></li>
 <li><a class="internal present" href="https://github.com/alibaba/canal/wiki/Introduction">Introduction</a></li>
-<li><a class="internal present" href="https://github.com/alibaba/canal/wiki/QuickStart">QuickStart</a></li>
-<li><a class="internal present" href="https://github.com/alibaba/canal/wiki/ClientExample">ClientExample</a></li>
+<li>
+<a class="internal present" href="https://github.com/alibaba/canal/wiki/QuickStart">QuickStart</a>
+<ul>
+<li><a class="internal present" href="https://github.com/alibaba/canal/wiki/Docker-QuickStart">Docker QuickStart</a></li>
+<li><a class="internal present" href="https://github.com/alibaba/canal/wiki/Canal-Kafka-RocketMQ-QuickStart">Canal Kafka/RocketMQ QuickStart</a></li>
+<li><a class="internal present" href="https://github.com/alibaba/canal/wiki/aliyun-RDS-QuickStart">Aliyun RDS QuickStart</a></li>
+<li><a class="internal present" href="https://github.com/alibaba/canal/wiki/Prometheus-QuickStart">Prometheus QuickStart</a></li>
+</ul>
+</li>
 <li><a class="internal present" href="https://github.com/alibaba/canal/wiki/AdminGuide">AdminGuide</a></li>
+<li><a class="internal present" href="https://github.com/alibaba/canal/wiki/ClientExample">ClientExample</a></li>
 <li><a class="internal present" href="https://github.com/alibaba/canal/wiki/ClientAPI">ClientAPI</a></li>
+<li><a class="internal present" href="https://github.com/alibaba/canal/wiki/Performance">Performance</a></li>
 <li><a class="internal present" href="https://github.com/alibaba/canal/wiki/DevGuide">DevGuide</a></li>
 <li><a class="internal present" href="https://github.com/alibaba/canal/wiki/BinlogChange%28mysql5.6%29">BinlogChange(Mysql5.6)</a></li>
+<li><a class="internal present" href="https://github.com/alibaba/canal/wiki/BinlogChange%28MariaDB%29">BinlogChange(MariaDB)</a></li>
+<li><a class="internal present" href="https://github.com/alibaba/canal/wiki/TableMetaTSDB">TableMetaTSDB</a></li>
 <li><a href="http://alibaba.github.com/canal/release.html">ReleaseNotes</a></li>
 <li><a href="https://github.com/alibaba/canal/releases">Download</a></li>
+<li><a class="internal present" href="/alibaba/canal/wiki/FAQ">FAQ</a></li>
 </ul>
 
+<h1>多语言业务</h1>
+
+1. canal整体交互协议设计上使用了protobuf3.0,理论上可以支持绝大部分的多语言场景,欢迎大家提交多客户端的PR
+    * canal java客户端: <a href="https://github.com/alibaba/canal/wiki/ClientExample"> https://github.com/alibaba/canal/wiki/ClientExample </a>
+    * canal c#客户端开源项目地址:<a href="https://github.com/CanalClient/CanalSharp"> https://github.com/CanalClient/CanalSharp </a>
+    * canal go客户端开源项目地址:<a href="https://github.com/CanalClient/canal-go"> https://github.com/CanalClient/canal-go </a>
+2. canal作为MySQL binlog的增量获取工具,可以将数据投递到MQ系统中,比如Kafka/RocketMQ,可以借助于MQ的多语言能力 
+    * 参考文档: [Canal Kafka/RocketMQ QuickStart](https://github.com/alibaba/canal/wiki/Canal-Kafka-RocketMQ-QuickStart)
+
 <h1>相关资料</h1>
 
 * ADC阿里技术嘉年华分享ppt (放在google docs上,可能需要翻墙): <a href="https://docs.google.com/presentation/d/1MkszUPYRDkfVPz9IqOT1LLT5d9tuwde_WC8GZvjaDRg/edit?usp=sharing">ppt下载</a>  
-* [与阿里巴巴的RocketMQ配合使用](https://github.com/alibaba/RocketMQ)
+* [与阿里巴巴的RocketMQ配合使用](https://github.com/apache/RocketMQ)
 
 <h1>相关开源</h1>
 <ol>
@@ -73,6 +98,14 @@ See the wiki page for : <a href="https://github.com/alibaba/canal/wiki" >wiki文
 <li>阿里巴巴去Oracle数据迁移同步工具(目标支持MySQL/DRDS):<a href="http://github.com/alibaba/yugong">http://github.com/alibaba/yugong</a></li>
 </ol>
 
+<h1>相关产品</h1>
+<ol>
+<li><a href="https://www.aliyun.com/product/drds?spm=5176.55326.cloudEssentials.71.69fd227dRPZj9K">阿里云分布式数据库DRDS</a></li>
+<li><a href="https://www.aliyun.com/product/dts?spm=5176.7947010.cloudEssentials.80.33f734f4JOAxSP">阿里云数据传输服务DTS</a></li>
+<li><a href="https://www.aliyun.com/product/dbs?spm=5176.54487.cloudEssentials.83.34b851a8GmVZg6">阿里云数据库备份服务DBS</a></li>
+<li><a href="https://www.aliyun.com/product/dms?spm=5176.169464.cloudEssentials.81.2e1066feC1sBBL">阿里云数据管理服务DMS</a></li>
+</ol>
+
 <h1>问题反馈</h1>
 <ol>
 <li>qq交流群: 161559791 </li>
@@ -81,9 +114,13 @@ See the wiki page for : <a href="https://github.com/alibaba/canal/wiki" >wiki文
 <li>报告issue:<a href="https://github.com/alibaba/canal/issues">issues</a></li>
 </ol>
 
-<pre>
-【招聘】阿里巴巴中间件团队招聘JAVA高级工程师
-岗位主要为技术型内容(非业务部门),阿里中间件整个体系对于未来想在技术上有所沉淀的同学还是非常有帮助的
-工作地点:杭州、北京均可. ps. 阿里待遇向来都是不错的,有意者可以QQ、微博私聊. 
-具体招聘内容:https://job.alibaba.com/zhaopin/position_detail.htm?positionId=32666
-</pre>
+<h3>最新更新</h3>
+<ol>
+<li>canal发布重大版本更新1.1.0,具体releaseNode参考:<a href="https://github.com/alibaba/canal/releases/tag/canal-1.1.0">https://github.com/alibaba/canal/releases/tag/canal-1.1.0</a></li>
+<li>canal c#客户端开源项目地址:<a href="https://github.com/CanalClient/CanalSharp"> https://github.com/CanalClient/CanalSharp </a>,推荐! </li>
+<li>canal QQ讨论群已经建立,群号:161559791 ,欢迎加入进行技术讨论。</li>
+<li>canal消费端项目开源: Otter(分布式数据库同步系统),地址:<a href="https://github.com/alibaba/otter">https://github.com/alibaba/otter</a></li>
+
+<li>Canal已在阿里云推出商业化版本 <a href="https://www.aliyun.com/product/dts?spm=a2c4g.11186623.cloudEssentials.80.srdwr7">数据传输服务DTS</a>, 开通即用,免去部署维护的昂贵使用成本。DTS针对阿里云RDS、DRDS等产品进行了适配,解决了Binlog日志回收,主备切换、VPC网络切换等场景下的订阅高可用问题。同时,针对RDS进行了针对性的性能优化。出于稳定性、性能及成本的考虑,强烈推荐阿里云用户使用DTS产品。<a href="https://help.aliyun.com/document_detail/26592.html?spm=a2c4g.11174283.6.539.t1Y91E">DTS产品使用文档</a></li>
+DTS支持阿里云RDS&DRDS的Binlog日志实时订阅,现推出首月免费体验,限时限量,<a href="https://common-buy.aliyun.com/?commodityCode=dtspre&request=%7b%22dts_function%22%3a%22data_subscribe%22%7d">立即体验>>></a>
+</ol>

+ 545 - 0
client-adapter/README.md

@@ -0,0 +1,545 @@
+## 基本说明
+canal 1.1.1版本之后, 增加客户端数据落地的适配及启动功能, 目前支持功能:
+* 客户端启动器
+* 同步管理REST接口
+* 日志适配器, 作为DEMO
+* HBase的数据同步(表对表同步), ETL功能
+* (后续支持) ElasticSearch多表数据同步,ETL功能
+
+## 环境版本
+* 操作系统:无要求
+* java版本: jdk1.8 以上 
+* canal 版本: 请下载最新的安装包,本文以当前v1.1.1 的canal.deployer-1.1.1.tar.gz为例
+* MySQL版本 :5.7.18
+* HBase版本: Apache HBase 1.1.2, 若和服务端版本不一致可自行替换客户端HBase依赖
+
+## 一、适配器启动器
+client-adapter分为适配器和启动器两部分, 适配器为多个fat jar, 每个适配器会将自己所需的依赖打成一个包, 以SPI的方式让启动器动态加载
+
+
+启动器为 SpringBoot 项目, 支持canal-client启动的同时提供相关REST管理接口, 运行目录结构为:
+```
+- bin
+    restart.sh
+    startup.bat
+    startup.sh
+    stop.sh
+- lib
+    client-adapter.logger-1.1.1-jar-with-dependencies.jar
+    client-adapter.hbase-1.1.1-jar-with-dependencies.jar
+    ...
+- conf
+    application.yml
+    - hbase
+        mytest_person2.yml
+- logs
+```
+以上目录结构最终会打包成 canal-adapter-launcher.tar.gz 压缩包
+
+## 二、启动器
+### 2.1 启动器配置 application.yml
+#### canal相关配置部分说明
+```
+canal.conf:
+  canalServerHost: 127.0.0.1:11111  # 对应单机模式下的canal server的ip:port
+  zookeeperHosts: slave1:2181       # 对应集群模式下的zk地址, 如果配置了canalServerHost, 则以canalServerHost为准
+  bootstrapServers: slave1:6667     # kafka或rocketMQ地址, 与canalServerHost不能并存
+  flatMessage: true                 # 扁平message开关, 是否以json字符串形式投递数据, 仅在kafka/rocketMQ模式下有效
+  canalInstances:                   # canal实例组, 如果是tcp模式可配置此项
+  - instance: example               # 对应canal destination
+    groups:                  # 对应适配器分组, 分组间的适配器并行运行
+    - outAdapters:                  # 适配器列表, 分组内的适配串行运行
+      - name: logger                # 适配器SPI名
+      - name: hbase
+        properties:                 # HBase相关连接参数
+          hbase.zookeeper.quorum: slave1
+          hbase.zookeeper.property.clientPort: 2181
+          zookeeper.znode.parent: /hbase
+  mqTopics:                         # MQ topic组, 如果是kafka或者rocketMQ模式可配置此项, 与canalInstances不能并存
+  - mqMode: kafka                   # MQ的模式: kafka/rocketMQ
+    topic: example                  # MQ topic
+    groups:                         # group组
+    - groupId: g2                   # group id
+      outAdapters:                  # 适配器列表, 以下配置和canalInstances中一样
+      - name: logger                
+```
+#### 适配器相关配置部分说明
+```
+adapter.conf:
+  datasourceConfigs:                # 数据源配置列表, 数据源将在适配器中用于ETL、数据同步回查等使用
+    defaultDS:                      # 数据源 dataSourceKey, 适配器中通过该值获取指定数据源
+      url: jdbc:mysql://127.0.0.1:3306/mytest?useUnicode=true
+      username: root
+      password: 121212
+  adapterConfigs:                   # 适配器内部配置列表
+  - hbase/mytest_person2.yml        # 类型/配置文件名, 这里示例就是对应HBase适配器hbase目录下的mytest_person2.yml文件
+```
+## 2.2 同步管理REST接口
+#### 2.2.1 查询所有订阅同步的canal destination或MQ topic
+```
+curl http://127.0.0.1:8081/destinations
+```
+#### 2.2.2 数据同步开关
+```
+curl http://127.0.0.1:8081/syncSwitch/example/off -X PUT
+```
+针对 example 这个canal destination/MQ topic 进行开关操作. off代表关闭, 该destination/topic下的同步将阻塞或者断开连接不再接收数据, on代表开启
+
+注: 如果在配置文件中配置了 zookeeperHosts 项, 则会使用分布式锁来控制HA中的数据同步开关, 如果是单机模式则使用本地锁来控制开关
+#### 2.2.3 数据同步开关状态
+```
+curl http://127.0.0.1:8081/syncSwitch/example
+```
+查看指定 canal destination/MQ topic 的数据同步开关状态
+#### 2.2.4 手动ETL
+```
+curl http://127.0.0.1:8081/etl/hbase/mytest_person2.yml -X POST -d "params=2018-10-21 00:00:00"
+```
+导入数据到指定类型的库
+#### 2.2.5 查看相关库总数据
+```
+curl http://127.0.0.1:8081/count/hbase/mytest_person2.yml
+```
+### 2.3 启动canal-adapter示例
+#### 2.3.1 启动canal server (单机模式), 参考: [Canal QuickStart](https://github.com/alibaba/canal/wiki/QuickStart)
+#### 2.3.2 修改conf/application.yml为:
+```
+server:
+  port: 8081
+logging:
+  level:
+    com.alibaba.otter.canal.client.adapter.hbase: DEBUG
+spring:
+  jackson:
+    date-format: yyyy-MM-dd HH:mm:ss
+    time-zone: GMT+8
+    default-property-inclusion: non_null
+
+canal.conf:
+  canalServerHost: 127.0.0.1:11111
+  flatMessage: true
+  canalInstances:
+  - instance: example
+    adapterGroups:
+    - outAdapters:
+      - name: logger
+```
+启动
+```
+bin/startup.sh
+```
+
+## 三、HBase适配器
+### 3.1 修改启动器配置: application.yml
+```
+server:
+  port: 8081
+logging:
+  level:
+    com.alibaba.otter.canal.client.adapter.hbase: DEBUG
+spring:
+  jackson:
+    date-format: yyyy-MM-dd HH:mm:ss
+    time-zone: GMT+8
+    default-property-inclusion: non_null
+
+canal.conf:
+  canalServerHost: 127.0.0.1:11111
+  flatMessage: true
+  srcDataSources:
+    defaultDS:
+      url: jdbc:mysql://127.0.0.1:3306/mytest?useUnicode=true
+      username: root
+      password: 121212
+  canalInstances:
+  - instance: example
+    adapterGroups:
+    - outAdapters:
+      - name: hbase
+        properties:
+          hbase.zookeeper.quorum: slave1
+          hbase.zookeeper.property.clientPort: 2181
+          zookeeper.znode.parent: /hbase
+```
+adapter将会自动加载 conf/hbase 下的所有.yml结尾的配置文件
+### 3.2 适配器表映射文件
+修改 conf/hbase/mytest_person.yml文件:
+```
+dataSourceKey: defaultDS            # 对应application.yml中的datasourceConfigs下的配置
+hbaseMapping:                       # mysql--HBase的单表映射配置
+  mode: STRING                      # HBase中的存储类型, 默认统一存为String, 可选: #PHOENIX  #NATIVE   #STRING 
+                                    # NATIVE: 以java类型为主, PHOENIX: 将类型转换为Phoenix对应的类型
+  destination: example              # 对应 canal destination/MQ topic 名称
+  database: mytest                  # 数据库名/schema名
+  table: person                     # 表名
+  hbaseTable: MYTEST.PERSON         # HBase表名
+  family: CF                        # 默认统一Column Family名称
+  uppercaseQualifier: true          # 字段名转大写, 默认为true
+  commitBatch: 3000                 # 批量提交的大小, ETL中用到
+  #rowKey: id,type                  # 复合字段rowKey不能和columns中的rowKey并存
+                                    # 复合rowKey会以 '|' 分隔
+  columns:                          # 字段映射, 如果不配置将自动映射所有字段, 
+                                    # 并取第一个字段为rowKey, HBase字段名以mysql字段名为主
+    id: ROWKE                       
+    name: CF:NAME
+    email: EMAIL                    # 如果column family为默认CF, 则可以省略
+    type:                           # 如果HBase字段和mysql字段名一致, 则可以省略
+    c_time: 
+    birthday: 
+```
+如果涉及到类型转换,可以如下形式:
+```
+...
+  columns:                         
+    id: ROWKE$STRING                      
+    ...                   
+    type: TYPE$BYTE                          
+    ...
+```
+类型转换涉及到Java类型和Phoenix类型两种, 分别定义如下:
+```
+#Java 类型转换, 对应配置 mode: NATIVE
+$DEFAULT
+$STRING
+$INTEGER
+$LONG
+$SHORT
+$BOOLEAN
+$FLOAT
+$DOUBLE
+$BIGDECIMAL
+$DATE
+$BYTE
+$BYTES
+```
+```
+#Phoenix 类型转换, 对应配置 mode: PHOENIX
+$DEFAULT                  对应PHOENIX里的VARCHAR
+$UNSIGNED_INT             对应PHOENIX里的UNSIGNED_INT           4字节
+$UNSIGNED_LONG            对应PHOENIX里的UNSIGNED_LONG          8字节
+$UNSIGNED_TINYINT         对应PHOENIX里的UNSIGNED_TINYINT       1字节
+$UNSIGNED_SMALLINT        对应PHOENIX里的UNSIGNED_SMALLINT      2字节
+$UNSIGNED_FLOAT           对应PHOENIX里的UNSIGNED_FLOAT         4字节
+$UNSIGNED_DOUBLE          对应PHOENIX里的UNSIGNED_DOUBLE        8字节
+$INTEGER                  对应PHOENIX里的INTEGER                4字节
+$BIGINT                   对应PHOENIX里的BIGINT                 8字节
+$TINYINT                  对应PHOENIX里的TINYINT                1字节
+$SMALLINT                 对应PHOENIX里的SMALLINT               2字节
+$FLOAT                    对应PHOENIX里的FLOAT                  4字节
+$DOUBLE                   对应PHOENIX里的DOUBLE                 8字节
+$BOOLEAN                  对应PHOENIX里的BOOLEAN                1字节
+$TIME                     对应PHOENIX里的TIME                   8字节
+$DATE                     对应PHOENIX里的DATE                   8字节
+$TIMESTAMP                对应PHOENIX里的TIMESTAMP              12字节
+$UNSIGNED_TIME            对应PHOENIX里的UNSIGNED_TIME          8字节
+$UNSIGNED_DATE            对应PHOENIX里的UNSIGNED_DATE          8字节
+$UNSIGNED_TIMESTAMP       对应PHOENIX里的UNSIGNED_TIMESTAMP     12字节
+$VARCHAR                  对应PHOENIX里的VARCHAR                动态长度
+$VARBINARY                对应PHOENIX里的VARBINARY              动态长度
+$DECIMAL                  对应PHOENIX里的DECIMAL                动态长度
+```
+如果不配置将以java对象原生类型默认映射转换
+### 3.3 启动HBase数据同步
+#### 创建HBase表
+在HBase shell中运行:
+```
+create 'MYTEST.PERSON', {NAME=>'CF'}
+```
+#### 启动canal-adapter启动器
+```
+bin/startup.sh
+```
+#### 验证
+修改mysql mytest.person表的数据, 将会自动同步到HBase的MYTEST.PERSON表下面, 并会打出DML的log
+
+
+## 四、关系型数据库适配器
+
+RDB adapter 用于适配mysql到任意关系型数据库(需支持jdbc)的数据同步及导入
+
+### 4.1 修改启动器配置: application.yml, 这里以oracle目标库为例
+```
+server:
+  port: 8081
+logging:
+  level:
+    com.alibaba.otter.canal.client.adapter.rdb: DEBUG
+......
+
+canal.conf:
+  canalServerHost: 127.0.0.1:11111
+  srcDataSources:
+    defaultDS:
+      url: jdbc:mysql://127.0.0.1:3306/mytest?useUnicode=true
+      username: root
+      password: 121212
+  canalInstances:
+  - instance: example
+    groups:
+    - outAdapters:
+      - name: rdb                                               # 指定为rdb类型同步
+        key: oracle1                                            # 指定adapter的唯一key, 与表映射配置中outerAdapterKey对应
+        properties:
+          jdbc.driverClassName: oracle.jdbc.OracleDriver        # jdbc驱动名, jdbc的jar包需要自行放致lib目录下
+          jdbc.url: jdbc:oracle:thin:@localhost:49161:XE        # jdbc url
+          jdbc.username: mytest                                 # jdbc username
+          jdbc.password: m121212                                # jdbc password
+          threads: 5                                            # 并行执行的线程数, 默认为1
+          commitSize: 3000                                      # 批次提交的最大行数
+```
+其中 outAdapter 的配置: name统一为rdb, key为对应的数据源的唯一标识需和下面的表映射文件中的outerAdapterKey对应, properties为目标库jdbc的相关参数
+adapter将会自动加载 conf/rdb 下的所有.yml结尾的表映射配置文件
+
+### 4.2 适配器表映射文件
+修改 conf/rdb/mytest_user.yml文件:
+```
+dataSourceKey: defaultDS        # 源数据源的key, 对应上面配置的srcDataSources中的值
+destination: example            # canal的instance或者MQ的topic
+outerAdapterKey: oracle1        # adapter key, 对应上面配置outAdapters中的key
+concurrent: true                # 是否按主键hash并行同步, 并行同步的表必须保证主键不会更改及主键不能为其他同步表的外键!!
+dbMapping:
+  database: mytest              # 源数据源的database/schema
+  table: user                   # 源数据源表名
+  targetTable: mytest.tb_user   # 目标数据源的库名.表名
+  targetPk:                     # 主键映射
+    id: id                      # 如果是复合主键可以换行映射多个
+#  mapAll: true                 # 是否整表映射, 要求源表和目标表字段名一模一样 (如果targetColumns也配置了映射,则以targetColumns配置为准)
+  targetColumns:                # 字段映射, 格式: 目标表字段: 源表字段, 如果字段名一样源表字段名可不填
+    id:
+    name:
+    role_id:
+    c_time:
+    test1: 
+```
+导入的类型以目标表的元类型为准, 将自动转换
+
+### 4.3 启动RDB数据同步
+#### 将目标库的jdbc jar包放入lib文件夹, 这里放入ojdbc6.jar
+
+#### 启动canal-adapter启动器
+```
+bin/startup.sh
+```
+#### 验证
+修改mysql mytest.user表的数据, 将会自动同步到Oracle的MYTEST.TB_USER表下面, 并会打出DML的log
+
+
+## 五、ElasticSearch适配器
+### 5.1 修改启动器配置: application.yml
+```
+server:
+  port: 8081
+logging:
+  level:
+    com.alibaba.otter.canal.client.adapter.es: DEBUG
+spring:
+  jackson:
+    date-format: yyyy-MM-dd HH:mm:ss
+    time-zone: GMT+8
+    default-property-inclusion: non_null
+
+canal.conf:
+  canalServerHost: 127.0.0.1:11111
+  flatMessage: true
+  srcDataSources:
+    defaultDS:
+      url: jdbc:mysql://127.0.0.1:3306/mytest?useUnicode=true
+      username: root
+      password: 121212
+  canalInstances:
+  - instance: example
+    adapterGroups:
+    - outAdapters:
+      - name: es
+        hosts: 127.0.0.1:9300               # es 集群地址, 逗号分隔
+        properties:
+          cluster.name: elasticsearch       # es cluster name
+```
+adapter将会自动加载 conf/es 下的所有.yml结尾的配置文件
+### 5.2 适配器表映射文件
+修改 conf/es/mytest_user.yml文件:
+```
+dataSourceKey: defaultDS        # 源数据源的key, 对应上面配置的srcDataSources中的值
+destination: example            # canal的instance或者MQ的topic
+esMapping:
+  _index: mytest_user           # es 的索引名称
+  _type: _doc                   # es 的doc名称
+  _id: _id                      # es 的_id, 如果不配置该项必须配置下面的pk项_id则会由es自动分配
+#  pk: id                       # 如果不需要_id, 则需要指定一个属性为主键属性
+  # sql映射
+  sql: "select a.id as _id, a.name as _name, a.role_id as _role_id, b.role_name as _role_name,
+        a.c_time as _c_time, c.labels as _labels from user a
+        left join role b on b.id=a.role_id
+        left join (select user_id, group_concat(label order by id desc separator ';') as labels from label
+        group by user_id) c on c.user_id=a.id"
+#  objFields:
+#    _labels: array:;           # 数组或者对象属性, array:; 代表以;字段里面是以;分隔的
+#    _obj: obj:{"test":"123"}
+  etlCondition: "where a.c_time>='{0}'"     # etl 的条件参数
+  commitBatch: 3000                         # 提交批大小
+```
+sql映射说明: 
+
+sql支持多表关联自由组合, 但是有一定的限制: 
+1. 主表不能为子查询语句
+2. 只能使用left outer join即最左表一定要是主表
+3. 关联从表如果是子查询不能有多张表
+4. 主sql中不能有where查询条件(从表子查询中可以有where条件但是不推荐, 可能会造成数据同步的不一致, 比如修改了where条件中的字段内容)
+5. 关联条件只允许主外键的'='操作不能出现其他常量判断比如: on a.role_id=b.id and b.statues=1
+6. 关联条件必须要有一个字段出现在主查询语句中比如: on a.role_id=b.id  其中的 a.role_id 或者 b.id 必须出现在主select语句中
+
+
+Elastic Search的mapping 属性与sql的查询值将一一对应(不支持 select *), 比如: select a.id as _id, a.name, a.email as _email from user, 其中name将映射到es mapping的name field, _email将
+映射到mapping的_email field, 这里以别名(如果有别名)作为最终的映射字段. 这里的_id可以填写到配置文件的 _id: _id映射. 
+
+#### 5.2.1.单表映射索引示例sql:
+```
+select a.id as _id, a.name, a.role_id, a.c_time from user a
+```
+该sql对应的es mapping示例:
+```
+{
+    "mytest_user": {
+        "mappings": {
+            "_doc": {
+                "properties": {
+                    "name": {
+                        "type": "text"
+                    },
+                    "role_id": {
+                        "type": "long"
+                    },
+                    "c_time": {
+                        "type": "date"
+                    }
+                }
+            }
+        }
+    }
+}
+```
+
+#### 5.2.2.单表映射索引示例sql带函数或运算操作:
+```
+select a.id as _id, concat(a.name,'_test') as name, a.role_id+10000 as role_id, a.c_time from user a
+```
+函数字段后必须跟上别名, 该sql对应的es mapping示例:
+```
+{
+    "mytest_user": {
+        "mappings": {
+            "_doc": {
+                "properties": {
+                    "name": {
+                        "type": "text"
+                    },
+                    "role_id": {
+                        "type": "long"
+                    },
+                    "c_time": {
+                        "type": "date"
+                    }
+                }
+            }
+        }
+    }
+}
+```
+
+#### 5.2.3.多表映射(一对一, 多对一)索引示例sql:
+```
+select a.id as _id, a.name, a.role_id, b.role_name, a.c_time from user a 
+left join role b on b.id = a.role_id
+```
+注:这里join操作只能是left outer join, 第一张表必须为主表!!
+
+该sql对应的es mapping示例:
+```
+{
+    "mytest_user": {
+        "mappings": {
+            "_doc": {
+                "properties": {
+                    "name": {
+                        "type": "text"
+                    },
+                    "role_id": {
+                        "type": "long"
+                    },
+                    "role_name": {
+                        "type": "text"
+                    },
+                    "c_time": {
+                        "type": "date"
+                    }
+                }
+            }
+        }
+    }
+}
+```
+
+#### 5.2.4.多表映射(一对多)索引示例sql:
+```
+select a.id as _id, a.name, a.role_id, c.labels, a.c_time from user a 
+left join (select user_id, group_concat(label order by id desc separator ';') as labels from label
+        group by user_id) c on c.user_id=a.id
+```
+注:left join 后的子查询只允许一张表, 即子查询中不能再包含子查询或者关联!!
+
+该sql对应的es mapping示例:
+```
+{
+    "mytest_user": {
+        "mappings": {
+            "_doc": {
+                "properties": {
+                    "name": {
+                        "type": "text"
+                    },
+                    "role_id": {
+                        "type": "long"
+                    },
+                    "c_time": {
+                        "type": "date"
+                    },
+                    "labels": {
+                        "type": "text"
+                    }
+                }
+            }
+        }
+    }
+}
+```
+
+#### 5.2.5.其它类型的sql示例:
+- geo type
+```
+select ... concat(IFNULL(a.latitude, 0), ',', IFNULL(a.longitude, 0)) AS location, ...
+```
+- 复合主键
+```
+select concat(a.id,'_',b.type) as _id, ... from user a left join role b on b.id=a.role_id
+```
+- 数组字段
+```
+select a.id as _id, a.name, a.role_id, c.labels, a.c_time from user a 
+left join (select user_id, group_concat(label order by id desc separator ';') as labels from label
+        group by user_id) c on c.user_id=a.id
+```
+配置中使用:
+```
+objFields:
+  labels: array:;
+```
+
+### 5.3 启动ES数据同步
+#### 启动canal-adapter启动器
+```
+bin/startup.sh
+```
+#### 验证
+1. 新增mysql mytest.user表的数据, 将会自动同步到es的mytest_user索引下面, 并会打出DML的log
+2. 修改mysql mytest.role表的role_name, 将会自动同步es的mytest_user索引中的role_name数据
+3. 新增或者修改mysql mytest.label表的label, 将会自动同步es的mytest_user索引中的labels数据

+ 36 - 0
client-adapter/common/pom.xml

@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>canal.client-adapter</artifactId>
+        <groupId>com.alibaba.otter</groupId>
+        <version>1.1.3-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>com.alibaba.otter</groupId>
+    <artifactId>client-adapter.common</artifactId>
+    <packaging>jar</packaging>
+    <name>canal client adapter common module for otter ${project.version}</name>
+    <dependencies>
+        <dependency>
+            <groupId>com.alibaba.otter</groupId>
+            <artifactId>canal.protocol</artifactId>
+            <version>1.1.3-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>joda-time</groupId>
+            <artifactId>joda-time</artifactId>
+            <version>2.9.4</version>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>druid</artifactId>
+            <version>1.1.9</version>
+        </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>5.1.40</version>
+        </dependency>
+    </dependencies>
+
+</project>

+ 68 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/OuterAdapter.java

@@ -0,0 +1,68 @@
+package com.alibaba.otter.canal.client.adapter;
+
+import java.util.List;
+import java.util.Map;
+
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+import com.alibaba.otter.canal.client.adapter.support.EtlResult;
+import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig;
+import com.alibaba.otter.canal.client.adapter.support.SPI;
+
+/**
+ * Outer adapter SPI interface: an implementation receives batches of canal DML
+ * events and synchronizes them into an external target. The default SPI
+ * implementation name declared by the annotation is "logger".
+ *
+ * @author rewerma 2018-8-18 下午10:14:02
+ * @version 1.0.0
+ */
+@SPI("logger")
+public interface OuterAdapter {
+
+    /**
+     * Initialize the outer adapter.
+     *
+     * @param configuration outer adapter configuration
+     */
+    void init(OuterAdapterConfig configuration);
+
+    /**
+     * Synchronize a batch of DML events into the adapter's target.
+     *
+     * @param dmls DML event batch
+     */
+    void sync(List<Dml> dmls);
+
+    /**
+     * Destroy the outer adapter and release its resources.
+     */
+    void destroy();
+
+    /**
+     * Run an ETL (full re-import) operation. Optional: the default
+     * implementation rejects the call.
+     *
+     * @param task task name, matching a mapping-config name
+     * @param params ETL filter conditions
+     */
+    default EtlResult etl(String task, List<String> params) {
+        throw new UnsupportedOperationException("unsupported operation");
+    }
+
+    /**
+     * Count the total number of rows for a task. Optional: the default
+     * implementation rejects the call.
+     *
+     * @param task task name, matching a mapping-config name
+     * @return total count
+     */
+    default Map<String, Object> count(String task) {
+        throw new UnsupportedOperationException("unsupported operation");
+    }
+
+    /**
+     * Resolve the canal destination for a given task. Optional: the default
+     * implementation returns null (no destination known).
+     *
+     * @param task task name, matching a mapping-config name
+     * @return destination, or null when the adapter cannot resolve it
+     */
+    default String getDestination(String task) {
+        return null;
+    }
+}

+ 192 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/CanalClientConfig.java

@@ -0,0 +1,192 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Top-level client configuration for the canal adapter: connection settings
+ * (single-node canal server, zookeeper cluster, or MQ brokers), batching and
+ * retry behavior, and the list of configured adapter instances/groups.
+ *
+ * @author rewerma 2018-8-18 下午10:40:12
+ * @version 1.0.0
+ */
+public class CanalClientConfig {
+
+    // ip:port of the canal server in single-node mode
+    private String             canalServerHost;
+    // zookeeper address list for cluster mode; ignored when the single-node
+    // address above is configured
+    private String             zookeeperHosts;
+    // kafka or rocket mq broker addresses
+    private String             mqServers;
+    // whether messages are transferred in flatMessage format; MQ modes only
+    private Boolean            flatMessage   = true;
+    // fetch batch size
+    private Integer            batchSize;
+    // per-commit batch size used during synchronization
+    private Integer            syncBatchSize = 1000;
+    // retry count
+    private Integer            retries;
+    // consume timeout
+    private Long               timeout;
+    // client mode: tcp, kafka or rocketMQ
+    private String             mode          = "tcp";
+    // aliyun ak/sk
+    private String             accessKey;
+    private String             secretKey;
+
+    // canal adapters configuration (one entry per canal instance / topic)
+    private List<CanalAdapter> canalAdapters;
+
+    public String getCanalServerHost() {
+        return canalServerHost;
+    }
+
+    public void setCanalServerHost(String canalServerHost) {
+        this.canalServerHost = canalServerHost;
+    }
+
+    public String getZookeeperHosts() {
+        return zookeeperHosts;
+    }
+
+    public void setZookeeperHosts(String zookeeperHosts) {
+        this.zookeeperHosts = zookeeperHosts;
+    }
+
+    public String getMqServers() {
+        return mqServers;
+    }
+
+    public void setMqServers(String mqServers) {
+        this.mqServers = mqServers;
+    }
+
+    public Boolean getFlatMessage() {
+        return flatMessage;
+    }
+
+    public void setFlatMessage(Boolean flatMessage) {
+        this.flatMessage = flatMessage;
+    }
+
+    public Integer getBatchSize() {
+        return batchSize;
+    }
+
+    public void setBatchSize(Integer batchSize) {
+        this.batchSize = batchSize;
+    }
+
+    public Integer getRetries() {
+        return retries;
+    }
+
+    public Integer getSyncBatchSize() {
+        return syncBatchSize;
+    }
+
+    public void setSyncBatchSize(Integer syncBatchSize) {
+        this.syncBatchSize = syncBatchSize;
+    }
+
+    // NOTE(review): setRetries is separated from getRetries by the
+    // syncBatchSize accessors above — cosmetic only, behavior is unaffected.
+    public void setRetries(Integer retries) {
+        this.retries = retries;
+    }
+
+    public Long getTimeout() {
+        return timeout;
+    }
+
+    public void setTimeout(Long timeout) {
+        this.timeout = timeout;
+    }
+
+    public String getMode() {
+        return mode;
+    }
+
+    public void setMode(String mode) {
+        this.mode = mode;
+    }
+
+    public String getAccessKey() {
+        return accessKey;
+    }
+
+    public void setAccessKey(String accessKey) {
+        this.accessKey = accessKey;
+    }
+
+    public String getSecretKey() {
+        return secretKey;
+    }
+
+    public void setSecretKey(String secretKey) {
+        this.secretKey = secretKey;
+    }
+
+    public List<CanalAdapter> getCanalAdapters() {
+        return canalAdapters;
+    }
+
+    public void setCanalAdapters(List<CanalAdapter> canalAdapters) {
+        this.canalAdapters = canalAdapters;
+    }
+
+    /**
+     * Configuration of a single canal instance (or MQ topic) and the adapter
+     * groups consuming it.
+     */
+    public static class CanalAdapter {
+
+        private String      instance; // instance name (canal destination / MQ topic)
+
+        private List<Group> groups;  // adapter group list
+
+        public String getInstance() {
+            return instance;
+        }
+
+        public void setInstance(String instance) {
+            // trim to tolerate stray whitespace from hand-edited YAML configs
+            if (instance != null) {
+                this.instance = instance.trim();
+            }
+        }
+
+        public List<Group> getGroups() {
+            return groups;
+        }
+
+        public void setGroups(List<Group> groups) {
+            this.groups = groups;
+        }
+    }
+
+    /**
+     * A group of outer adapters sharing one consumer group id.
+     */
+    public static class Group {
+
+        // group id
+        private String                          groupId          = "default";
+        private List<OuterAdapterConfig>        outerAdapters;                           // outer adapter list
+        private Map<String, OuterAdapterConfig> outerAdaptersMap = new LinkedHashMap<>();
+
+        public String getGroupId() {
+            return groupId;
+        }
+
+        public void setGroupId(String groupId) {
+            this.groupId = groupId;
+        }
+
+        public List<OuterAdapterConfig> getOuterAdapters() {
+            return outerAdapters;
+        }
+
+        public void setOuterAdapters(List<OuterAdapterConfig> outerAdapters) {
+            this.outerAdapters = outerAdapters;
+        }
+
+        public Map<String, OuterAdapterConfig> getOuterAdaptersMap() {
+            return outerAdaptersMap;
+        }
+
+        public void setOuterAdaptersMap(Map<String, OuterAdapterConfig> outerAdaptersMap) {
+            this.outerAdaptersMap = outerAdaptersMap;
+        }
+    }
+}

+ 6 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Constant.java

@@ -0,0 +1,6 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+/**
+ * Shared constants for the client adapter module.
+ * NOTE(review): as a pure constant holder this class would conventionally be
+ * final with a private constructor — flagged only, code left untouched.
+ */
+public class Constant {
+
+    // Relative directory name the adapter mapping configurations are loaded from
+    public static final String CONF_DIR = "conf";
+}

+ 81 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/DatasourceConfig.java

@@ -0,0 +1,81 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import com.alibaba.druid.pool.DruidDataSource;
+
+/**
+ * Datasource configuration for adapters, plus a process-wide registry of the
+ * Druid pools created from these configs.
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+public class DatasourceConfig {
+
+    public final static Map<String, DruidDataSource> DATA_SOURCES = new ConcurrentHashMap<>(); // datasource registry, keyed by config key
+
+    private String                                   driver       = "com.mysql.jdbc.Driver";   // jdbc driver, defaults to mysql
+    private String                                   url;                                      // jdbc url
+    private String                                   database;                                 // jdbc database
+    private String                                   type         = "mysql";                   // datasource type, defaults to mysql
+    private String                                   username;                                 // jdbc username
+    private String                                   password;                                 // jdbc password
+    private Integer                                  maxActive    = 3;                         // pool max active connections, defaults to 3
+
+    public String getDriver() {
+        return driver;
+    }
+
+    public void setDriver(String driver) {
+        this.driver = driver;
+    }
+
+    public String getUrl() {
+        return url;
+    }
+
+    public void setUrl(String url) {
+        this.url = url;
+    }
+
+    public String getDatabase() {
+        return database;
+    }
+
+    public void setDatabase(String database) {
+        this.database = database;
+    }
+
+    public String getType() {
+        return type;
+    }
+
+    public void setType(String type) {
+        this.type = type;
+    }
+
+    public String getUsername() {
+        return username;
+    }
+
+    public void setUsername(String username) {
+        this.username = username;
+    }
+
+    public String getPassword() {
+        return password;
+    }
+
+    public void setPassword(String password) {
+        this.password = password;
+    }
+
+    public Integer getMaxActive() {
+        return maxActive;
+    }
+
+    public void setMaxActive(Integer maxActive) {
+        this.maxActive = maxActive;
+    }
+}

+ 118 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Dml.java

@@ -0,0 +1,118 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * DML event transfer object: one parsed binlog change (INSERT/UPDATE/DELETE)
+ * for a single table, carrying the affected rows and, for updates, the
+ * previous values.
+ *
+ * @author rewerma 2018-8-19 下午11:30:49
+ * @version 1.0.0
+ */
+public class Dml implements Serializable {
+
+    private static final long         serialVersionUID = 2611556444074013268L;
+
+    private String                    destination;                            // canal instance name or MQ topic
+    private String                    database;                               // database or schema
+    private String                    table;                                  // table name
+    private String                    type;                                   // event type: INSERT UPDATE DELETE
+    // binlog executeTime
+    private Long                      es;                                     // binlog execute timestamp (source DB time)
+    // dml build timeStamp
+    private Long                      ts;                                     // time this Dml object was built (sync time)
+    private String                    sql;                                    // executed sql; empty for row-based dml
+    private List<Map<String, Object>> data;                                   // row data list (column name -> value)
+    private List<Map<String, Object>> old;                                    // previous values for UPDATE; same size/order as data
+
+    public String getDestination() {
+        return destination;
+    }
+
+    public void setDestination(String destination) {
+        this.destination = destination;
+    }
+
+    public String getDatabase() {
+        return database;
+    }
+
+    public void setDatabase(String database) {
+        this.database = database;
+    }
+
+    public String getTable() {
+        return table;
+    }
+
+    public void setTable(String table) {
+        this.table = table;
+    }
+
+    public String getType() {
+        return type;
+    }
+
+    public void setType(String type) {
+        this.type = type;
+    }
+
+    public Long getTs() {
+        return ts;
+    }
+
+    public void setTs(Long ts) {
+        this.ts = ts;
+    }
+
+    public String getSql() {
+        return sql;
+    }
+
+    public void setSql(String sql) {
+        this.sql = sql;
+    }
+
+    public List<Map<String, Object>> getData() {
+        return data;
+    }
+
+    public void setData(List<Map<String, Object>> data) {
+        this.data = data;
+    }
+
+    public List<Map<String, Object>> getOld() {
+        return old;
+    }
+
+    public void setOld(List<Map<String, Object>> old) {
+        this.old = old;
+    }
+
+    public Long getEs() {
+        return es;
+    }
+
+    public void setEs(Long es) {
+        this.es = es;
+    }
+
+    // Reset all event fields (destination is intentionally kept) so the
+    // instance can be reused for the next event.
+    public void clear() {
+        database = null;
+        table = null;
+        type = null;
+        ts = null;
+        es = null;
+        data = null;
+        old = null;
+        sql = null;
+    }
+
+    @Override
+    public String toString() {
+        return "Dml{" + "destination='" + destination + '\'' + ", database='" + database + '\'' + ", table='" + table
+               + '\'' + ", type='" + type + '\'' + ", es=" + es + ", ts=" + ts + ", sql='" + sql + '\'' + ", data="
+               + data + ", old=" + old + '}';
+    }
+}

+ 44 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/EtlResult.java

@@ -0,0 +1,44 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.io.Serializable;
+
+/**
+ * Result object of an ETL (full re-import) operation: success flag plus an
+ * informational message and, on failure, an error message.
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+public class EtlResult implements Serializable {
+
+    private static final long serialVersionUID = 4250522736289866505L;
+
+    private boolean           succeeded        = false;
+
+    private String            resultMessage;
+
+    private String            errorMessage;
+
+    // NOTE(review): JavaBean convention would name this isSucceeded(); kept as
+    // getSucceeded() because callers/serializers may rely on the current name.
+    public boolean getSucceeded() {
+        return succeeded;
+    }
+
+    public void setSucceeded(boolean succeeded) {
+        this.succeeded = succeeded;
+    }
+
+    public String getResultMessage() {
+        return resultMessage;
+    }
+
+    public void setResultMessage(String resultMessage) {
+        this.resultMessage = resultMessage;
+    }
+
+    public String getErrorMessage() {
+        return errorMessage;
+    }
+
+    public void setErrorMessage(String errorMessage) {
+        this.errorMessage = errorMessage;
+    }
+}

+ 421 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/ExtensionLoader.java

@@ -0,0 +1,421 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.InputStreamReader;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.regex.Pattern;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * SPI extension class loader (Dubbo-style): discovers implementations of an
+ * {@code @SPI}-annotated interface from descriptor files under
+ * META-INF/canal/ and META-INF/services/ inside jars found in an external
+ * "plugin" directory, instantiating and caching one extension per name.
+ *
+ * @author rewerma 2018-8-19 下午11:30:49
+ * @version 1.0.0
+ */
+public class ExtensionLoader<T> {
+
+    private static final Logger                                      logger                     = LoggerFactory.getLogger(ExtensionLoader.class);
+
+    private static final String                                      SERVICES_DIRECTORY         = "META-INF/services/";
+
+    private static final String                                      CANAL_DIRECTORY            = "META-INF/canal/";
+
+    private static final String                                      DEFAULT_CLASSLOADER_POLICY = "internal";
+
+    private static final Pattern                                     NAME_SEPARATOR             = Pattern.compile("\\s*[,]+\\s*");
+
+    private static final ConcurrentMap<Class<?>, ExtensionLoader<?>> EXTENSION_LOADERS          = new ConcurrentHashMap<>();
+
+    private static final ConcurrentMap<Class<?>, Object>             EXTENSION_INSTANCES        = new ConcurrentHashMap<>();
+
+    private static final ConcurrentMap<String, Object>               EXTENSION_KEY_INSTANCE     = new ConcurrentHashMap<>();
+
+    private final Class<?>                                           type;
+
+    private final String                                             classLoaderPolicy;
+
+    private final ConcurrentMap<Class<?>, String>                    cachedNames                = new ConcurrentHashMap<>();
+
+    private final Holder<Map<String, Class<?>>>                      cachedClasses              = new Holder<>();
+
+    private final ConcurrentMap<String, Holder<Object>>              cachedInstances            = new ConcurrentHashMap<>();
+
+    private String                                                   cachedDefaultName;
+
+    private ConcurrentHashMap<String, IllegalStateException>         exceptions                 = new ConcurrentHashMap<>();
+
+    // True when the interface carries the @SPI marker annotation
+    private static <T> boolean withExtensionAnnotation(Class<T> type) {
+        return type.isAnnotationPresent(SPI.class);
+    }
+
+    public static <T> ExtensionLoader<T> getExtensionLoader(Class<T> type) {
+        return getExtensionLoader(type, DEFAULT_CLASSLOADER_POLICY);
+    }
+
+    /**
+     * Get (or lazily create) the loader for an @SPI interface.
+     *
+     * @param type the @SPI-annotated interface
+     * @param classLoaderPolicy "internal" (child-first URLClassExtensionLoader)
+     *            or anything else for a plain parent-first URLClassLoader
+     * @throws IllegalArgumentException when type is null, not an interface, or
+     *             not annotated with @SPI
+     */
+    @SuppressWarnings("unchecked")
+    public static <T> ExtensionLoader<T> getExtensionLoader(Class<T> type, String classLoaderPolicy) {
+        if (type == null) throw new IllegalArgumentException("Extension type == null");
+        if (!type.isInterface()) {
+            throw new IllegalArgumentException("Extension type(" + type + ") is not interface!");
+        }
+        if (!withExtensionAnnotation(type)) {
+            throw new IllegalArgumentException("Extension type(" + type + ") is not extension, because WITHOUT @"
+                                               + SPI.class.getSimpleName() + " Annotation!");
+        }
+
+        // putIfAbsent + re-get keeps a single loader per type under races
+        ExtensionLoader<T> loader = (ExtensionLoader<T>) EXTENSION_LOADERS.get(type);
+        if (loader == null) {
+            EXTENSION_LOADERS.putIfAbsent(type, new ExtensionLoader<T>(type, classLoaderPolicy));
+            loader = (ExtensionLoader<T>) EXTENSION_LOADERS.get(type);
+        }
+        return loader;
+    }
+
+    private ExtensionLoader(Class<?> type, String classLoaderPolicy){
+        this.type = type;
+        this.classLoaderPolicy = classLoaderPolicy;
+    }
+
+    /**
+     * Return the extension registered under the given name; the name "true"
+     * resolves to the default extension. Instances are cached per name with
+     * double-checked locking on the per-name holder.
+     *
+     * @param name extension name from the SPI descriptor file
+     * @return the (shared) extension instance
+     */
+    @SuppressWarnings("unchecked")
+    public T getExtension(String name) {
+        if (name == null || name.length() == 0) throw new IllegalArgumentException("Extension name == null");
+        if ("true".equals(name)) {
+            return getDefaultExtension();
+        }
+        Holder<Object> holder = cachedInstances.get(name);
+        if (holder == null) {
+            cachedInstances.putIfAbsent(name, new Holder<>());
+            holder = cachedInstances.get(name);
+        }
+        Object instance = holder.get();
+        if (instance == null) {
+            synchronized (holder) {
+                instance = holder.get();
+                if (instance == null) {
+                    instance = createExtension(name);
+                    holder.set(instance);
+                }
+            }
+        }
+        return (T) instance;
+    }
+
+    // Same as getExtension(name) but caches a distinct instance per
+    // (name, key) pair, so one implementation class can back several
+    // separately-configured instances.
+    @SuppressWarnings("unchecked")
+    public T getExtension(String name, String key) {
+        if (name == null || name.length() == 0) throw new IllegalArgumentException("Extension name == null");
+        if ("true".equals(name)) {
+            return getDefaultExtension();
+        }
+        String extKey = name + "-" + StringUtils.trimToEmpty(key);
+        Holder<Object> holder = cachedInstances.get(extKey);
+        if (holder == null) {
+            cachedInstances.putIfAbsent(extKey, new Holder<>());
+            holder = cachedInstances.get(extKey);
+        }
+        Object instance = holder.get();
+        if (instance == null) {
+            synchronized (holder) {
+                instance = holder.get();
+                if (instance == null) {
+                    instance = createExtension(name, key);
+                    holder.set(instance);
+                }
+            }
+        }
+        return (T) instance;
+    }
+
+    /**
+     * Return the default extension (the name declared in the @SPI annotation),
+     * or <code>null</code> when no default is configured.
+     */
+    public T getDefaultExtension() {
+        getExtensionClasses();
+        if (null == cachedDefaultName || cachedDefaultName.length() == 0 || "true".equals(cachedDefaultName)) {
+            return null;
+        }
+        return getExtension(cachedDefaultName);
+    }
+
+    // Instantiate (or fetch the cached) singleton extension for a name.
+    @SuppressWarnings("unchecked")
+    private T createExtension(String name) {
+        Class<?> clazz = getExtensionClasses().get(name);
+        if (clazz == null) {
+            throw new IllegalStateException("Extension instance(name: " + name + ", class: " + type
+                                            + ")  could not be instantiated: class could not be found");
+        }
+        try {
+            T instance = (T) EXTENSION_INSTANCES.get(clazz);
+            if (instance == null) {
+                EXTENSION_INSTANCES.putIfAbsent(clazz, (T) clazz.newInstance());
+                instance = (T) EXTENSION_INSTANCES.get(clazz);
+            }
+            return instance;
+        } catch (Throwable t) {
+            throw new IllegalStateException("Extension instance(name: " + name + ", class: " + type
+                                            + ")  could not be instantiated: " + t.getMessage(), t);
+        }
+    }
+
+    // Instantiate (or fetch the cached) extension for a (name, key) pair.
+    @SuppressWarnings("unchecked")
+    private T createExtension(String name, String key) {
+        Class<?> clazz = getExtensionClasses().get(name);
+        if (clazz == null) {
+            throw new IllegalStateException("Extension instance(name: " + name + ", class: " + type
+                                            + ")  could not be instantiated: class could not be found");
+        }
+        try {
+            T instance = (T) EXTENSION_KEY_INSTANCE.get(name + "-" + key);
+            if (instance == null) {
+                EXTENSION_KEY_INSTANCE.putIfAbsent(name + "-" + key, clazz.newInstance());
+                instance = (T) EXTENSION_KEY_INSTANCE.get(name + "-" + key);
+            }
+            return instance;
+        } catch (Throwable t) {
+            throw new IllegalStateException("Extension instance(name: " + name + ", class: " + type
+                                            + ")  could not be instantiated: " + t.getMessage(), t);
+        }
+    }
+
+    // Lazily load and cache the name -> implementation-class map
+    // (double-checked locking on the cachedClasses holder).
+    private Map<String, Class<?>> getExtensionClasses() {
+        Map<String, Class<?>> classes = cachedClasses.get();
+        if (classes == null) {
+            synchronized (cachedClasses) {
+                classes = cachedClasses.get();
+                if (classes == null) {
+                    classes = loadExtensionClasses();
+                    cachedClasses.set(classes);
+                }
+            }
+        }
+
+        return classes;
+    }
+
+    // Best-effort resolution of the directory containing the running jar (or
+    // classes dir), used to locate the sibling "plugin" folder.
+    private String getJarDirectoryPath() {
+        URL url = Thread.currentThread().getContextClassLoader().getResource("");
+        String dirtyPath;
+        if (url != null) {
+            dirtyPath = url.toString();
+        } else {
+            File file = new File("");
+            dirtyPath = file.getAbsolutePath();
+        }
+        String jarPath = dirtyPath.replaceAll("^.*file:/", ""); // removes
+                                                                // file:/ and
+                                                                // everything
+                                                                // before it
+        jarPath = jarPath.replaceAll("jar!.*", "jar"); // removes everything
+                                                       // after .jar, if .jar
+                                                       // exists in dirtyPath
+        jarPath = jarPath.replaceAll("%20", " "); // necessary if path has
+                                                  // spaces within
+        if (!jarPath.endsWith(".jar")) { // this is needed if you plan to run
+                                         // the app using Spring Tools Suit play
+                                         // button.
+            jarPath = jarPath.replaceAll("/classes/.*", "/classes/");
+        }
+        return Paths.get(jarPath).getParent().toString(); // Paths - from java 8
+    }
+
+    // Read the default extension name from @SPI, then scan every jar in the
+    // external plugin directory for SPI descriptor files.
+    private Map<String, Class<?>> loadExtensionClasses() {
+        final SPI defaultAnnotation = type.getAnnotation(SPI.class);
+        if (defaultAnnotation != null) {
+            String value = defaultAnnotation.value();
+            if ((value = value.trim()).length() > 0) {
+                String[] names = NAME_SEPARATOR.split(value);
+                if (names.length > 1) {
+                    throw new IllegalStateException("more than 1 default extension name on extension " + type.getName()
+                                                    + ": " + Arrays.toString(names));
+                }
+                if (names.length == 1) cachedDefaultName = names[0];
+            }
+        }
+
+        Map<String, Class<?>> extensionClasses = new HashMap<String, Class<?>>();
+
+        // 1. plugin folder,customized extension classLoader (jar_dir/plugin)
+        String dir = File.separator + this.getJarDirectoryPath() + File.separator + "plugin";
+
+        File externalLibDir = new File(dir);
+        if (!externalLibDir.exists()) {
+            // fallback location used when launched from the source tree
+            externalLibDir = new File(File.separator + this.getJarDirectoryPath() + File.separator + "canal-adapter"
+                                      + File.separator + "plugin");
+        }
+        logger.info("extension classpath dir: " + externalLibDir.getAbsolutePath());
+        if (externalLibDir.exists()) {
+            File[] files = externalLibDir.listFiles((dir1, name) -> name.endsWith(".jar"));
+            if (files != null) {
+                for (File f : files) {
+                    URL url;
+                    try {
+                        url = f.toURI().toURL();
+                    } catch (MalformedURLException e) {
+                        throw new RuntimeException("load extension jar failed!", e);
+                    }
+
+                    // "internal" policy isolates each plugin jar child-first;
+                    // any other policy delegates to the context classloader
+                    ClassLoader parent = Thread.currentThread().getContextClassLoader();
+                    URLClassLoader localClassLoader;
+                    if (classLoaderPolicy == null || "".equals(classLoaderPolicy)
+                        || DEFAULT_CLASSLOADER_POLICY.equalsIgnoreCase(classLoaderPolicy)) {
+                        localClassLoader = new URLClassExtensionLoader(new URL[] { url });
+                    } else {
+                        localClassLoader = new URLClassLoader(new URL[] { url }, parent);
+                    }
+
+                    loadFile(extensionClasses, CANAL_DIRECTORY, localClassLoader);
+                    loadFile(extensionClasses, SERVICES_DIRECTORY, localClassLoader);
+                }
+            }
+        }
+        // Only external SPI jars are loaded; classpath scanning is
+        // intentionally disabled.
+        // 2. load inner extension class with default classLoader
+        // ClassLoader classLoader = findClassLoader();
+        // loadFile(extensionClasses, CANAL_DIRECTORY, classLoader);
+        // loadFile(extensionClasses, SERVICES_DIRECTORY, classLoader);
+
+        return extensionClasses;
+    }
+
+    // Parse one SPI descriptor resource ("name=fqcn" or bare "fqcn" lines,
+    // '#' starts a comment) and register the classes it declares. Per-line
+    // load failures are recorded in 'exceptions' rather than aborting.
+    private void loadFile(Map<String, Class<?>> extensionClasses, String dir, ClassLoader classLoader) {
+        String fileName = dir + type.getName();
+        try {
+            Enumeration<URL> urls;
+            if (classLoader != null) {
+                urls = classLoader.getResources(fileName);
+            } else {
+                urls = ClassLoader.getSystemResources(fileName);
+            }
+            if (urls != null) {
+                while (urls.hasMoreElements()) {
+                    URL url = urls.nextElement();
+                    try {
+                        BufferedReader reader = null;
+                        try {
+                            reader = new BufferedReader(new InputStreamReader(url.openStream(), "utf-8"));
+                            String line = null;
+                            while ((line = reader.readLine()) != null) {
+                                final int ci = line.indexOf('#');
+                                if (ci >= 0) line = line.substring(0, ci);
+                                line = line.trim();
+                                if (line.length() > 0) {
+                                    try {
+                                        String name = null;
+                                        int i = line.indexOf('=');
+                                        if (i > 0) {
+                                            name = line.substring(0, i).trim();
+                                            line = line.substring(i + 1).trim();
+                                        }
+                                        if (line.length() > 0) {
+                                            Class<?> clazz = classLoader.loadClass(line);
+                                            // Class<?> clazz =
+                                            // Class.forName(line, true,
+                                            // classLoader);
+                                            if (!type.isAssignableFrom(clazz)) {
+                                                // NOTE(review): message below is missing a space
+                                                // before "is not subtype" — cannot be fixed in a
+                                                // comment-only change.
+                                                throw new IllegalStateException("Error when load extension class(interface: "
+                                                                                + type
+                                                                                + ", class line: "
+                                                                                + clazz.getName()
+                                                                                + "), class "
+                                                                                + clazz.getName()
+                                                                                + "is not subtype of interface.");
+                                            } else {
+                                                try {
+                                                    // a copy-constructor-style ctor taking the SPI
+                                                    // type means "wrapper": skip name registration
+                                                    clazz.getConstructor(type);
+                                                } catch (NoSuchMethodException e) {
+                                                    clazz.getConstructor();
+                                                    String[] names = NAME_SEPARATOR.split(name);
+                                                    if (names != null && names.length > 0) {
+                                                        for (String n : names) {
+                                                            if (!cachedNames.containsKey(clazz)) {
+                                                                cachedNames.put(clazz, n);
+                                                            }
+                                                            Class<?> c = extensionClasses.get(n);
+                                                            if (c == null) {
+                                                                extensionClasses.put(n, clazz);
+                                                            } else if (c != clazz) {
+                                                                cachedNames.remove(clazz);
+                                                                throw new IllegalStateException("Duplicate extension "
+                                                                                                + type.getName()
+                                                                                                + " name " + n + " on "
+                                                                                                + c.getName() + " and "
+                                                                                                + clazz.getName());
+                                                            }
+                                                        }
+                                                    }
+                                                }
+                                            }
+                                        }
+                                    } catch (Throwable t) {
+                                        IllegalStateException e = new IllegalStateException("Failed to load extension class(interface: "
+                                                                                            + type
+                                                                                            + ", class line: "
+                                                                                            + line
+                                                                                            + ") in "
+                                                                                            + url
+                                                                                            + ", cause: "
+                                                                                            + t.getMessage(),
+                                            t);
+                                        exceptions.put(line, e);
+                                    }
+                                }
+                            } // end of while read lines
+                        } finally {
+                            if (reader != null) {
+                                reader.close();
+                            }
+                        }
+                    } catch (Throwable t) {
+                        logger.error("Exception when load extension class(interface: " + type + ", class file: " + url
+                                     + ") in " + url, t);
+                    }
+                } // end of while urls
+            }
+        } catch (Throwable t) {
+            logger.error("Exception when load extension class(interface: " + type + ", description file: " + fileName
+                         + ").", t);
+        }
+    }
+
+    @SuppressWarnings("unused")
+    private static ClassLoader findClassLoader() {
+        return ExtensionLoader.class.getClassLoader();
+    }
+
+    @Override
+    public String toString() {
+        return this.getClass().getName() + "[" + type.getName() + "]";
+    }
+
+    // Simple mutable box with a volatile value, used for double-checked
+    // locking caches above.
+    private static class Holder<T> {
+
+        private volatile T value;
+
+        private void set(T value) {
+            this.value = value;
+        }
+
+        private T get() {
+            return value;
+        }
+
+    }
+}

+ 149 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/JdbcTypeUtil.java

@@ -0,0 +1,149 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.sql.*;
+
+import org.joda.time.DateTime;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * 类型转换工具类
+ *
+ * @author rewerma 2018-8-19 下午06:14:23
+ * @version 1.0.0
+ */
+public class JdbcTypeUtil {
+
+    private static Logger logger = LoggerFactory.getLogger(JdbcTypeUtil.class);
+
+    public static Object getRSData(ResultSet rs, String columnName, int jdbcType) throws SQLException {
+        if (jdbcType == Types.BIT || jdbcType == Types.BOOLEAN) {
+            return rs.getByte(columnName);
+        } else {
+            return rs.getObject(columnName);
+        }
+    }
+
+    public static Class<?> jdbcType2javaType(int jdbcType) {
+        switch (jdbcType) {
+            case Types.BIT:
+            case Types.BOOLEAN:
+                // return Boolean.class;
+            case Types.TINYINT:
+                return Byte.TYPE;
+            case Types.SMALLINT:
+                return Short.class;
+            case Types.INTEGER:
+                return Integer.class;
+            case Types.BIGINT:
+                return Long.class;
+            case Types.DECIMAL:
+            case Types.NUMERIC:
+                return BigDecimal.class;
+            case Types.REAL:
+                return Float.class;
+            case Types.FLOAT:
+            case Types.DOUBLE:
+                return Double.class;
+            case Types.CHAR:
+            case Types.VARCHAR:
+            case Types.LONGVARCHAR:
+                return String.class;
+            case Types.BINARY:
+            case Types.VARBINARY:
+            case Types.LONGVARBINARY:
+            case Types.BLOB:
+                return byte[].class;
+            case Types.DATE:
+                return java.sql.Date.class;
+            case Types.TIME:
+                return Time.class;
+            case Types.TIMESTAMP:
+                return Timestamp.class;
+            default:
+                return String.class;
+        }
+    }
+
+    public static Object typeConvert(String columnName, String value, int sqlType, String mysqlType) {
+        if (value == null || value.equals("")) {
+            return null;
+        }
+
+        try {
+            Object res;
+            switch (sqlType) {
+                case Types.INTEGER:
+                    res = Integer.parseInt(value);
+                    break;
+                case Types.SMALLINT:
+                    res = Short.parseShort(value);
+                    break;
+                case Types.BIT:
+                case Types.TINYINT:
+                    res = Byte.parseByte(value);
+                    break;
+                case Types.BIGINT:
+                    if (mysqlType.startsWith("bigint") && mysqlType.endsWith("unsigned")) {
+                        res = new BigInteger(value);
+                    } else {
+                        res = Long.parseLong(value);
+                    }
+                    break;
+                // case Types.BIT:
+                case Types.BOOLEAN:
+                    res = !"0".equals(value);
+                    break;
+                case Types.DOUBLE:
+                case Types.FLOAT:
+                    res = Double.parseDouble(value);
+                    break;
+                case Types.REAL:
+                    res = Float.parseFloat(value);
+                    break;
+                case Types.DECIMAL:
+                case Types.NUMERIC:
+                    res = new BigDecimal(value);
+                    break;
+                case Types.BINARY:
+                case Types.VARBINARY:
+                case Types.LONGVARBINARY:
+                case Types.BLOB:
+                    res = value.getBytes("ISO-8859-1");
+                    break;
+                case Types.DATE:
+                    if (!value.startsWith("0000-00-00")) {
+                        value = value.trim().replace(" ", "T");
+                        DateTime dt = new DateTime(value);
+                        res = new Date(dt.toDate().getTime());
+                    } else {
+                        res = null;
+                    }
+                    break;
+                case Types.TIME:
+                    value = "T" + value;
+                    DateTime dt = new DateTime(value);
+                    res = new Time(dt.toDate().getTime());
+                    break;
+                case Types.TIMESTAMP:
+                    if (!value.startsWith("0000-00-00")) {
+                        value = value.trim().replace(" ", "T");
+                        dt = new DateTime(value);
+                        res = new Timestamp(dt.toDate().getTime());
+                    } else {
+                        res = null;
+                    }
+                    break;
+                case Types.CLOB:
+                default:
+                    res = value;
+            }
+            return res;
+        } catch (Exception e) {
+            logger.error("table: {} column: {}, failed convert type {} to {}", columnName, value, sqlType);
+            return value;
+        }
+    }
+}

+ 71 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/MappingConfigsLoader.java

@@ -0,0 +1,71 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.Map;
+
+public class MappingConfigsLoader {
+
+    public static Map<String, String> loadConfigs(String name) {
+        Map<String, String> configContentMap = new HashMap<>();
+
+        // 先取本地文件,再取类路径
+        File configDir = new File(".." + File.separator + Constant.CONF_DIR + File.separator + name);
+        if (!configDir.exists()) {
+            URL url = MappingConfigsLoader.class.getClassLoader().getResource("");
+            if (url != null) {
+                configDir = new File(url.getPath() + name + File.separator);
+            }
+        }
+
+        File[] files = configDir.listFiles();
+        if (files != null) {
+            for (File file : files) {
+                String fileName = file.getName();
+                if (!fileName.endsWith(".yml")) {
+                    continue;
+                }
+                try (InputStream in = new FileInputStream(file)) {
+                    byte[] bytes = new byte[in.available()];
+                    in.read(bytes);
+                    String configContent = new String(bytes, StandardCharsets.UTF_8);
+                    configContentMap.put(fileName, configContent);
+                } catch (IOException e) {
+                    throw new RuntimeException("Read " + name + "mapping config: " + fileName + " error. ", e);
+                }
+            }
+        }
+
+        return configContentMap;
+    }
+
+    public static String loadConfig(String name) {
+        // 先取本地文件,再取类路径
+        File filePath = new File(".." + File.separator + Constant.CONF_DIR + File.separator + name);
+        if (!filePath.exists()) {
+            URL url = MappingConfigsLoader.class.getClassLoader().getResource("");
+            if (url != null) {
+                filePath = new File(url.getPath() + name);
+            }
+        }
+        if (filePath.exists()) {
+            String fileName = filePath.getName();
+            if (!fileName.endsWith(".yml")) {
+                return null;
+            }
+            try (InputStream in = new FileInputStream(filePath)) {
+                byte[] bytes = new byte[in.available()];
+                in.read(bytes);
+                return new String(bytes, StandardCharsets.UTF_8);
+            } catch (IOException e) {
+                throw new RuntimeException("Read mapping config: " + filePath.getAbsolutePath() + " error. ", e);
+            }
+        }
+        return null;
+    }
+}

+ 181 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/MessageUtil.java

@@ -0,0 +1,181 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import com.alibaba.otter.canal.protocol.CanalEntry;
+import com.alibaba.otter.canal.protocol.FlatMessage;
+import com.alibaba.otter.canal.protocol.Message;
+
+/**
+ * Message对象解析工具类
+ *
+ * @author rewerma 2018-8-19 下午06:14:23
+ * @version 1.0.0
+ */
+public class MessageUtil {
+
+    public static List<Dml> parse4Dml(String destination, Message message) {
+        if (message == null) {
+            return null;
+        }
+        List<CanalEntry.Entry> entries = message.getEntries();
+        List<Dml> dmls = new ArrayList<Dml>(entries.size());
+        for (CanalEntry.Entry entry : entries) {
+            if (entry.getEntryType() == CanalEntry.EntryType.TRANSACTIONBEGIN
+                || entry.getEntryType() == CanalEntry.EntryType.TRANSACTIONEND) {
+                continue;
+            }
+
+            CanalEntry.RowChange rowChange;
+            try {
+                rowChange = CanalEntry.RowChange.parseFrom(entry.getStoreValue());
+            } catch (Exception e) {
+                throw new RuntimeException("ERROR ## parser of eromanga-event has an error , data:" + entry.toString(),
+                    e);
+            }
+
+            CanalEntry.EventType eventType = rowChange.getEventType();
+
+            final Dml dml = new Dml();
+            dml.setDestination(destination);
+            dml.setDatabase(entry.getHeader().getSchemaName());
+            dml.setTable(entry.getHeader().getTableName());
+            dml.setType(eventType.toString());
+            dml.setEs(entry.getHeader().getExecuteTime());
+            dml.setTs(System.currentTimeMillis());
+            dml.setSql(rowChange.getSql());
+            dmls.add(dml);
+            List<Map<String, Object>> data = new ArrayList<>();
+            List<Map<String, Object>> old = new ArrayList<>();
+
+            if (!rowChange.getIsDdl()) {
+                Set<String> updateSet = new HashSet<>();
+                for (CanalEntry.RowData rowData : rowChange.getRowDatasList()) {
+                    if (eventType != CanalEntry.EventType.INSERT && eventType != CanalEntry.EventType.UPDATE
+                        && eventType != CanalEntry.EventType.DELETE) {
+                        continue;
+                    }
+
+                    Map<String, Object> row = new LinkedHashMap<>();
+                    List<CanalEntry.Column> columns;
+
+                    if (eventType == CanalEntry.EventType.DELETE) {
+                        columns = rowData.getBeforeColumnsList();
+                    } else {
+                        columns = rowData.getAfterColumnsList();
+                    }
+
+                    for (CanalEntry.Column column : columns) {
+                        row.put(column.getName(),
+                            JdbcTypeUtil.typeConvert(column.getName(),
+                                column.getValue(),
+                                column.getSqlType(),
+                                column.getMysqlType()));
+                        // 获取update为true的字段
+                        if (column.getUpdated()) {
+                            updateSet.add(column.getName());
+                        }
+                    }
+                    if (!row.isEmpty()) {
+                        data.add(row);
+                    }
+
+                    if (eventType == CanalEntry.EventType.UPDATE) {
+                        Map<String, Object> rowOld = new LinkedHashMap<>();
+                        for (CanalEntry.Column column : rowData.getBeforeColumnsList()) {
+                            if (updateSet.contains(column.getName())) {
+                                rowOld.put(column.getName(),
+                                    JdbcTypeUtil.typeConvert(column.getName(),
+                                        column.getValue(),
+                                        column.getSqlType(),
+                                        column.getMysqlType()));
+                            }
+                        }
+                        // update操作将记录修改前的值
+                        if (!rowOld.isEmpty()) {
+                            old.add(rowOld);
+                        }
+                    }
+                }
+                if (!data.isEmpty()) {
+                    dml.setData(data);
+                }
+                if (!old.isEmpty()) {
+                    dml.setOld(old);
+                }
+            }
+        }
+
+        return dmls;
+    }
+
+    public static List<Dml> flatMessage2Dml(String destination, List<FlatMessage> flatMessages) {
+        List<Dml> dmls = new ArrayList<Dml>(flatMessages.size());
+        for (FlatMessage flatMessage : flatMessages) {
+            Dml dml = flatMessage2Dml(destination, flatMessage);
+            if (dml != null) {
+                dmls.add(dml);
+            }
+        }
+
+        return dmls;
+    }
+
+    public static Dml flatMessage2Dml(String destination, FlatMessage flatMessage) {
+        if (flatMessage == null) {
+            return null;
+        }
+        Dml dml = new Dml();
+        dml.setDestination(destination);
+        dml.setDatabase(flatMessage.getDatabase());
+        dml.setTable(flatMessage.getTable());
+        dml.setType(flatMessage.getType());
+        dml.setTs(flatMessage.getTs());
+        dml.setEs(flatMessage.getEs());
+        dml.setSql(flatMessage.getSql());
+        if (flatMessage.getSqlType() == null || flatMessage.getMysqlType() == null) {
+            throw new RuntimeException("SqlType or mysqlType is null");
+        }
+        List<Map<String, String>> data = flatMessage.getData();
+        if (data != null) {
+            dml.setData(changeRows(data, flatMessage.getSqlType(), flatMessage.getMysqlType()));
+        }
+        List<Map<String, String>> old = flatMessage.getOld();
+        if (old != null) {
+            dml.setOld(changeRows(old, flatMessage.getSqlType(), flatMessage.getMysqlType()));
+        }
+        return dml;
+    }
+
+    private static List<Map<String, Object>> changeRows(List<Map<String, String>> rows, Map<String, Integer> sqlTypes,
+                                                        Map<String, String> mysqlTypes) {
+        List<Map<String, Object>> result = new ArrayList<>();
+        for (Map<String, String> row : rows) {
+            Map<String, Object> resultRow = new LinkedHashMap<>();
+            for (Map.Entry<String, String> entry : row.entrySet()) {
+                String columnName = entry.getKey();
+                String columnValue = entry.getValue();
+
+                Integer sqlType = sqlTypes.get(columnName);
+                if (sqlType == null) {
+                    continue;
+                }
+
+                String mysqlType = mysqlTypes.get(columnName);
+                if (mysqlType == null) {
+                    continue;
+                }
+
+                Object finalValue = JdbcTypeUtil.typeConvert(columnName, columnValue, sqlType, mysqlType);
+                resultRow.put(columnName, finalValue);
+            }
+            result.add(resultRow);
+        }
+        return result;
+    }
+}

+ 62 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/OuterAdapterConfig.java

@@ -0,0 +1,62 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.util.Map;
+
/**
 * 外部适配器配置信息类 — configuration bean for one outer adapter instance
 * (logger, hbase, es, ...).
 *
 * @author rewerma 2018-8-18 下午10:15:12
 * @version 1.0.0
 */
public class OuterAdapterConfig {

    // Adapter name, e.g. logger, hbase, es
    private String              name;

    // Unique key distinguishing multiple instances of the same adapter
    private String              key;

    // Adapter-internal address, e.g. the ES server address for the es adapter
    private String              hosts;

    // Adapter-internal ZK address, e.g. the HBase ZK quorum for the hbase adapter
    private String              zkHosts;

    // Any extra adapter-specific settings
    private Map<String, String> properties;

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getKey() {
        return this.key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    public String getHosts() {
        return this.hosts;
    }

    public void setHosts(String hosts) {
        this.hosts = hosts;
    }

    public String getZkHosts() {
        return this.zkHosts;
    }

    public void setZkHosts(String zkHosts) {
        this.zkHosts = zkHosts;
    }

    public Map<String, String> getProperties() {
        return this.properties;
    }

    public void setProperties(Map<String, String> properties) {
        this.properties = properties;
    }
}

+ 57 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Result.java

@@ -0,0 +1,57 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.io.Serializable;
+import java.util.Date;
+
/**
 * 用于rest的结果返回对象 — response envelope returned by the REST endpoints.
 * Code 20000 denotes success by default.
 *
 * @author rewerma @ 2018-10-20
 * @version 1.0.0
 */
public class Result implements Serializable {

    private static final long serialVersionUID = -3276409502352405716L;

    public Integer code = 20000; // 20000 = success
    public Object  data;         // payload, if any
    public String  message;      // human-readable message
    public Date    sysTime;      // server-side timestamp

    /** Builds a success result (default code 20000) carrying only a message. */
    public static Result createSuccess(String message) {
        Result r = new Result();
        r.setMessage(message);
        return r;
    }

    public Integer getCode() {
        return this.code;
    }

    public void setCode(Integer code) {
        this.code = code;
    }

    public Object getData() {
        return this.data;
    }

    public void setData(Object data) {
        this.data = data;
    }

    public String getMessage() {
        return this.message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public Date getSysTime() {
        return this.sysTime;
    }

    public void setSysTime(Date sysTime) {
        this.sysTime = sysTime;
    }
}

+ 22 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/SPI.java

@@ -0,0 +1,22 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
/**
 * SPI装载器注解 — marks a type as an SPI extension; the value is the name the
 * extension is registered under (empty string means the default implementation).
 *
 * @author rewerma @ 2018-10-20
 * @version 1.0.0
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface SPI {

    /** Extension name; empty selects the default implementation. */
    String value() default "";
}

+ 88 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/URLClassExtensionLoader.java

@@ -0,0 +1,88 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.io.IOException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.Enumeration;
+import java.util.NoSuchElementException;
+
/**
 * Child-first URL class loader. Classes are resolved from the supplied jars
 * before the parent classpath, except JDK and logging packages which are always
 * delegated to the parent so plugin and host share the same instances.
 */
public class URLClassExtensionLoader extends URLClassLoader {

    public URLClassExtensionLoader(URL[] urls) {
        super(urls);
    }

    @Override
    public Class<?> loadClass(String name) throws ClassNotFoundException {
        Class<?> loaded = findLoadedClass(name);
        if (loaded != null) {
            return loaded;
        }

        // Always delegate JDK and logging classes to the parent loader.
        if (name.startsWith("java.") || name.startsWith("org.slf4j.")
            || name.startsWith("org.apache.logging")
            || name.startsWith("org.apache.commons.logging.")) {
            return super.loadClass(name);
        }

        try {
            // 先加载jar内的class,可避免jar冲突 — child-first: resolve from the
            // plugin jars before the parent to shield plugins from version conflicts
            return findClass(name);
        } catch (ClassNotFoundException e) {
            return super.loadClass(name);
        }
    }

    @Override
    public Enumeration<URL> getResources(String name) throws IOException {
        @SuppressWarnings("unchecked")
        Enumeration<URL>[] sources = (Enumeration<URL>[]) new Enumeration<?>[2];
        sources[0] = findResources(name); // plugin-local resources only
        // sources[1] intentionally left null: parent resources are not exposed
        return new CompoundEnumeration<>(sources);
    }

    /** Flattens an array of enumerations, skipping null/exhausted entries. */
    private static class CompoundEnumeration<E> implements Enumeration<E> {

        private final Enumeration<E>[] parts;
        private int                    cursor = 0;

        CompoundEnumeration(Enumeration<E>[] parts){
            this.parts = parts;
        }

        // Advances cursor to the next enumeration that still has elements.
        private boolean advance() {
            while (cursor < parts.length) {
                if (parts[cursor] != null && parts[cursor].hasMoreElements()) {
                    return true;
                }
                cursor++;
            }
            return false;
        }

        public boolean hasMoreElements() {
            return advance();
        }

        public E nextElement() {
            if (!advance()) {
                throw new NoSuchElementException();
            }
            return parts[cursor].nextElement();
        }
    }
}

+ 120 - 0
client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Util.java

@@ -0,0 +1,120 @@
+package com.alibaba.otter.canal.client.adapter.support;
+
+import java.io.File;
+import java.net.URL;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.function.Consumer;
+import java.util.function.Function;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class Util {
+
+    private static final Logger logger = LoggerFactory.getLogger(Util.class);
+
+    /**
+     * 通过DS执行sql
+     */
+    public static Object sqlRS(DataSource ds, String sql, Function<ResultSet, Object> fun) throws SQLException {
+        Connection conn = null;
+        Statement stmt = null;
+        ResultSet rs = null;
+        try {
+            conn = ds.getConnection();
+            stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+            stmt.setFetchSize(Integer.MIN_VALUE);
+            rs = stmt.executeQuery(sql);
+            return fun.apply(rs);
+        } finally {
+            if (rs != null) {
+                try {
+                    rs.close();
+                } catch (SQLException e) {
+                    logger.error(e.getMessage(), e);
+                }
+            }
+            if (stmt != null) {
+                try {
+                    stmt.close();
+                } catch (SQLException e) {
+                    logger.error(e.getMessage(), e);
+                }
+            }
+            if (conn != null) {
+                try {
+                    conn.close();
+                } catch (SQLException e) {
+                    logger.error(e.getMessage(), e);
+                }
+            }
+        }
+    }
+
+    /**
+     * sql执行获取resultSet
+     *
+     * @param conn sql connection
+     * @param sql sql
+     * @param consumer 回调方法
+     */
+    public static void sqlRS(Connection conn, String sql, Consumer<ResultSet> consumer) {
+        Statement stmt = null;
+        ResultSet rs = null;
+        try {
+            stmt = conn.createStatement();
+            rs = stmt.executeQuery(sql);
+            consumer.accept(rs);
+        } catch (SQLException e) {
+            logger.error(e.getMessage(), e);
+        } finally {
+            if (rs != null) {
+                try {
+                    rs.close();
+                } catch (SQLException e) {
+                    logger.error(e.getMessage(), e);
+                }
+            }
+            if (stmt != null) {
+                try {
+                    stmt.close();
+                } catch (SQLException e) {
+                    logger.error(e.getMessage(), e);
+                }
+            }
+        }
+    }
+
+    public static File getConfDirPath() {
+        return getConfDirPath("");
+    }
+
+    public static File getConfDirPath(String subConf) {
+        URL url = Util.class.getClassLoader().getResource("");
+        String path;
+        if (url != null) {
+            path = url.getPath();
+        } else {
+            path = new File("").getAbsolutePath();
+        }
+        File file = null;
+        if (path != null) {
+            file = new File(
+                path + ".." + File.separator + Constant.CONF_DIR + File.separator + StringUtils.trimToEmpty(subConf));
+            if (!file.exists()) {
+                file = new File(path + StringUtils.trimToEmpty(subConf));
+            }
+        }
+        if (file == null || !file.exists()) {
+            throw new RuntimeException("Config dir not found.");
+        }
+
+        return file;
+    }
+}

+ 95 - 0
client-adapter/elasticsearch/pom.xml

@@ -0,0 +1,95 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Maven module: canal client-adapter elasticsearch plugin -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>canal.client-adapter</artifactId>
+        <groupId>com.alibaba.otter</groupId>
+        <version>1.1.3-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>com.alibaba.otter</groupId>
+    <artifactId>client-adapter.elasticsearch</artifactId>
+    <packaging>jar</packaging>
+    <name>canal client adapter elasticsearch module for otter ${project.version}</name>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.alibaba.otter</groupId>
+            <artifactId>client-adapter.common</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.yaml</groupId>
+            <artifactId>snakeyaml</artifactId>
+            <version>1.19</version>
+            <scope>provided</scope>
+        </dependency>
+        <!-- fastsql: SQL parser (presumably used to parse the ES mapping SQL definitions) -->
+        <dependency>
+            <groupId>com.alibaba.fastsql</groupId>
+            <artifactId>fastsql</artifactId>
+            <version>2.0.0_preview_644</version>
+        </dependency>
+        <dependency>
+            <groupId>org.elasticsearch</groupId>
+            <artifactId>elasticsearch</artifactId>
+            <version>6.2.3</version>
+        </dependency>
+        <dependency>
+            <groupId>org.elasticsearch.client</groupId>
+            <artifactId>transport</artifactId>
+            <version>6.2.3</version>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.12</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <!-- package the adapter together with its dependencies into a single jar -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.4</version>
+                <configuration>
+                    <descriptorRefs>
+                        <descriptorRef>jar-with-dependencies</descriptorRef>
+                    </descriptorRefs>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <!-- after packaging, copy the es/*.yml mapping configs into the launcher module -->
+            <plugin>
+                <artifactId>maven-antrun-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>run</goal>
+                        </goals>
+                        <configuration>
+                            <tasks>
+                                <copy todir="${project.basedir}/../launcher/target/classes/es" overwrite="true">
+                                    <fileset dir="${project.basedir}/target/classes/es" erroronmissingdir="true">
+                                        <include name="*.yml" />
+                                    </fileset>
+                                </copy>
+                            </tasks>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>

+ 227 - 0
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/ESAdapter.java

@@ -0,0 +1,227 @@
+package com.alibaba.otter.canal.client.adapter.es;
+
+import java.net.InetAddress;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.sql.DataSource;
+
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.transport.TransportClient;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.transport.client.PreBuiltTransportClient;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.otter.canal.client.adapter.OuterAdapter;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfigLoader;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SqlParser;
+import com.alibaba.otter.canal.client.adapter.es.monitor.ESConfigMonitor;
+import com.alibaba.otter.canal.client.adapter.es.service.ESEtlService;
+import com.alibaba.otter.canal.client.adapter.es.service.ESSyncService;
+import com.alibaba.otter.canal.client.adapter.es.support.ESTemplate;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+import com.alibaba.otter.canal.client.adapter.support.EtlResult;
+import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig;
+import com.alibaba.otter.canal.client.adapter.support.SPI;
+
+/**
+ * ES外部适配器
+ *
+ * @author rewerma 2018-10-20
+ * @version 1.0.0
+ */
+@SPI("es")
+public class ESAdapter implements OuterAdapter {
+
+    private Map<String, ESSyncConfig>              esSyncConfig        = new ConcurrentHashMap<>(); // 文件名对应配置
+    private Map<String, Map<String, ESSyncConfig>> dbTableEsSyncConfig = new ConcurrentHashMap<>(); // schema-table对应配置
+
+    private TransportClient                        transportClient;
+
+    private ESSyncService                          esSyncService;
+
+    private ESConfigMonitor                        esConfigMonitor;
+
+    public TransportClient getTransportClient() {
+        return transportClient;
+    }
+
+    public ESSyncService getEsSyncService() {
+        return esSyncService;
+    }
+
+    public Map<String, ESSyncConfig> getEsSyncConfig() {
+        return esSyncConfig;
+    }
+
+    public Map<String, Map<String, ESSyncConfig>> getDbTableEsSyncConfig() {
+        return dbTableEsSyncConfig;
+    }
+
+    @Override
+    public void init(OuterAdapterConfig configuration) {
+        try {
+            Map<String, ESSyncConfig> esSyncConfigTmp = ESSyncConfigLoader.load();
+            // 过滤不匹配的key的配置
+            esSyncConfigTmp.forEach((key, config) -> {
+                if ((config.getOuterAdapterKey() == null && configuration.getKey() == null)
+                    || (config.getOuterAdapterKey() != null && config.getOuterAdapterKey()
+                        .equalsIgnoreCase(configuration.getKey()))) {
+                    esSyncConfig.put(key, config);
+                }
+            });
+
+            for (Map.Entry<String, ESSyncConfig> entry : esSyncConfig.entrySet()) {
+                String configName = entry.getKey();
+                ESSyncConfig config = entry.getValue();
+                SchemaItem schemaItem = SqlParser.parse(config.getEsMapping().getSql());
+                config.getEsMapping().setSchemaItem(schemaItem);
+
+                DruidDataSource dataSource = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+                if (dataSource == null || dataSource.getUrl() == null) {
+                    throw new RuntimeException("No data source found: " + config.getDataSourceKey());
+                }
+                Pattern pattern = Pattern.compile(".*:(.*)://.*/(.*)\\?.*$");
+                Matcher matcher = pattern.matcher(dataSource.getUrl());
+                if (!matcher.find()) {
+                    throw new RuntimeException("Not found the schema of jdbc-url: " + config.getDataSourceKey());
+                }
+                String schema = matcher.group(2);
+
+                schemaItem.getAliasTableItems()
+                    .values()
+                    .forEach(tableItem -> {
+                        Map<String, ESSyncConfig> esSyncConfigMap = dbTableEsSyncConfig.computeIfAbsent(schema
+                                                                                                        + "-"
+                                                                                                        + tableItem.getTableName(),
+                            k -> new HashMap<>());
+                        esSyncConfigMap.put(configName, config);
+                    });
+            }
+
+            Map<String, String> properties = configuration.getProperties();
+            Settings.Builder settingBuilder = Settings.builder();
+            properties.forEach(settingBuilder::put);
+            Settings settings = settingBuilder.build();
+            transportClient = new PreBuiltTransportClient(settings);
+            String[] hostArray = configuration.getHosts().split(",");
+            for (String host : hostArray) {
+                int i = host.indexOf(":");
+                transportClient.addTransportAddress(new TransportAddress(InetAddress.getByName(host.substring(0, i)),
+                    Integer.parseInt(host.substring(i + 1))));
+            }
+            ESTemplate esTemplate = new ESTemplate(transportClient);
+            esSyncService = new ESSyncService(esTemplate);
+
+            esConfigMonitor = new ESConfigMonitor();
+            esConfigMonitor.init(this);
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void sync(List<Dml> dmls) {
+        for (Dml dml : dmls) {
+            sync(dml);
+        }
+    }
+
+    public void sync(Dml dml) {
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> configMap = dbTableEsSyncConfig.get(database + "-" + table);
+        esSyncService.sync(configMap.values(), dml);
+    }
+
+    @Override
+    public EtlResult etl(String task, List<String> params) {
+        EtlResult etlResult = new EtlResult();
+        ESSyncConfig config = esSyncConfig.get(task);
+        if (config != null) {
+            DataSource dataSource = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+            ESEtlService esEtlService = new ESEtlService(transportClient, config);
+            if (dataSource != null) {
+                return esEtlService.importData(params, false);
+            } else {
+                etlResult.setSucceeded(false);
+                etlResult.setErrorMessage("DataSource not found");
+                return etlResult;
+            }
+        } else {
+            StringBuilder resultMsg = new StringBuilder();
+            boolean resSuccess = true;
+            // ds不为空说明传入的是datasourceKey
+            for (ESSyncConfig configTmp : esSyncConfig.values()) {
+                // 取所有的destination为task的配置
+                if (configTmp.getDestination().equals(task)) {
+                    ESEtlService esEtlService = new ESEtlService(transportClient, configTmp);
+                    EtlResult etlRes = esEtlService.importData(params, false);
+                    if (!etlRes.getSucceeded()) {
+                        resSuccess = false;
+                        resultMsg.append(etlRes.getErrorMessage()).append("\n");
+                    } else {
+                        resultMsg.append(etlRes.getResultMessage()).append("\n");
+                    }
+                }
+            }
+            if (resultMsg.length() > 0) {
+                etlResult.setSucceeded(resSuccess);
+                if (resSuccess) {
+                    etlResult.setResultMessage(resultMsg.toString());
+                } else {
+                    etlResult.setErrorMessage(resultMsg.toString());
+                }
+                return etlResult;
+            }
+        }
+        etlResult.setSucceeded(false);
+        etlResult.setErrorMessage("Task not found");
+        return etlResult;
+    }
+
+    @Override
+    public Map<String, Object> count(String task) {
+        ESSyncConfig config = esSyncConfig.get(task);
+        ESMapping mapping = config.getEsMapping();
+        SearchResponse response = transportClient.prepareSearch(mapping.get_index())
+            .setTypes(mapping.get_type())
+            .setSize(0)
+            .get();
+
+        long rowCount = response.getHits().getTotalHits();
+        Map<String, Object> res = new LinkedHashMap<>();
+        res.put("esIndex", mapping.get_index());
+        res.put("count", rowCount);
+        return res;
+    }
+
+    @Override
+    public void destroy() {
+        if (transportClient != null) {
+            transportClient.close();
+        }
+    }
+
+    @Override
+    public String getDestination(String task) {
+        if (esConfigMonitor != null) {
+            esConfigMonitor.destroy();
+        }
+
+        ESSyncConfig config = esSyncConfig.get(task);
+        if (config != null) {
+            return config.getDestination();
+        }
+        return null;
+    }
+}

+ 194 - 0
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/ESSyncConfig.java

@@ -0,0 +1,194 @@
+package com.alibaba.otter.canal.client.adapter.es.config;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * ES sync mapping configuration loaded from a yaml file: binds a data source,
+ * an adapter key and a canal destination to one {@link ESMapping}.
+ *
+ * @author rewerma 2018-11-01
+ * @version 1.0.0
+ */
+public class ESSyncConfig {
+
+    // key of the source database in DatasourceConfig.DATA_SOURCES
+    private String dataSourceKey;
+
+    // outer adapter key this config belongs to
+    private String outerAdapterKey;
+
+    // canal destination (instance name)
+    private String destination;
+
+    private ESMapping esMapping;
+
+    /**
+     * Checks that all mandatory mapping fields are present.
+     *
+     * @throws NullPointerException naming the first missing mandatory field
+     */
+    public void validate() {
+        if (esMapping._index == null) {
+            throw new NullPointerException("esMapping._index");
+        }
+        if (esMapping._type == null) {
+            throw new NullPointerException("esMapping._type");
+        }
+        if (esMapping._id == null && esMapping.pk == null) {
+            throw new NullPointerException("esMapping._id and esMapping.pk");
+        }
+        if (esMapping.sql == null) {
+            throw new NullPointerException("esMapping.sql");
+        }
+    }
+
+    public String getDataSourceKey() {
+        return this.dataSourceKey;
+    }
+
+    public void setDataSourceKey(String dataSourceKey) {
+        this.dataSourceKey = dataSourceKey;
+    }
+
+    public String getOuterAdapterKey() {
+        return this.outerAdapterKey;
+    }
+
+    public void setOuterAdapterKey(String outerAdapterKey) {
+        this.outerAdapterKey = outerAdapterKey;
+    }
+
+    public String getDestination() {
+        return this.destination;
+    }
+
+    public void setDestination(String destination) {
+        this.destination = destination;
+    }
+
+    public ESMapping getEsMapping() {
+        return this.esMapping;
+    }
+
+    public void setEsMapping(ESMapping esMapping) {
+        this.esMapping = esMapping;
+    }
+
+    /**
+     * The mapping between one SQL query and one ES index/type.
+     */
+    public static class ESMapping {
+
+        private String _index;
+        private String _type;
+        private String _id;
+        private String pk;
+        private String parent;
+        private String sql;
+        // object-typed fields, e.g. objFields: { _labels: "array:;" }
+        private Map<String, String> objFields = new LinkedHashMap<>();
+        // select columns to exclude from the ES document
+        private List<String> skips = new ArrayList<>();
+        // batch size for bulk commits during etl
+        private int commitBatch = 1000;
+        private String etlCondition;
+        // whether to run periodic timestamp-based sync
+        private boolean syncByTimestamp = false;
+        // interval between timestamp-based syncs
+        private Long syncInterval;
+
+        // parse result of the mapping SQL
+        private SchemaItem schemaItem;
+
+        public String get_index() {
+            return this._index;
+        }
+
+        public void set_index(String _index) {
+            this._index = _index;
+        }
+
+        public String get_type() {
+            return this._type;
+        }
+
+        public void set_type(String _type) {
+            this._type = _type;
+        }
+
+        public String get_id() {
+            return this._id;
+        }
+
+        public void set_id(String _id) {
+            this._id = _id;
+        }
+
+        public String getPk() {
+            return this.pk;
+        }
+
+        public void setPk(String pk) {
+            this.pk = pk;
+        }
+
+        public String getParent() {
+            return this.parent;
+        }
+
+        public void setParent(String parent) {
+            this.parent = parent;
+        }
+
+        public Map<String, String> getObjFields() {
+            return this.objFields;
+        }
+
+        public void setObjFields(Map<String, String> objFields) {
+            this.objFields = objFields;
+        }
+
+        public List<String> getSkips() {
+            return this.skips;
+        }
+
+        public void setSkips(List<String> skips) {
+            this.skips = skips;
+        }
+
+        public String getSql() {
+            return this.sql;
+        }
+
+        public void setSql(String sql) {
+            this.sql = sql;
+        }
+
+        public int getCommitBatch() {
+            return this.commitBatch;
+        }
+
+        public void setCommitBatch(int commitBatch) {
+            this.commitBatch = commitBatch;
+        }
+
+        public String getEtlCondition() {
+            return this.etlCondition;
+        }
+
+        public void setEtlCondition(String etlCondition) {
+            this.etlCondition = etlCondition;
+        }
+
+        public Long getSyncInterval() {
+            return this.syncInterval;
+        }
+
+        public void setSyncInterval(Long syncInterval) {
+            this.syncInterval = syncInterval;
+        }
+
+        public boolean isSyncByTimestamp() {
+            return this.syncByTimestamp;
+        }
+
+        public void setSyncByTimestamp(boolean syncByTimestamp) {
+            this.syncByTimestamp = syncByTimestamp;
+        }
+
+        public SchemaItem getSchemaItem() {
+            return this.schemaItem;
+        }
+
+        public void setSchemaItem(SchemaItem schemaItem) {
+            this.schemaItem = schemaItem;
+        }
+    }
+}

+ 41 - 0
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/ESSyncConfigLoader.java

@@ -0,0 +1,41 @@
+package com.alibaba.otter.canal.client.adapter.es.config;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
+
+/**
+ * Loads and validates all ES mapping config files from the "es" config
+ * directory.
+ *
+ * @author rewerma 2018-11-01
+ * @version 1.0.0
+ */
+public class ESSyncConfigLoader {
+
+    // FIX: logger constant made final
+    private static final Logger logger = LoggerFactory.getLogger(ESSyncConfigLoader.class);
+
+    /**
+     * Parses every yaml file under the "es" config directory into an
+     * {@link ESSyncConfig}, keyed by file name.
+     *
+     * @return file name -> validated config (insertion-ordered)
+     * @throws RuntimeException naming the offending file if validation fails
+     */
+    public static synchronized Map<String, ESSyncConfig> load() {
+        logger.info("## Start loading es mapping config ... ");
+
+        Map<String, ESSyncConfig> esSyncConfig = new LinkedHashMap<>();
+
+        Map<String, String> configContentMap = MappingConfigsLoader.loadConfigs("es");
+        // one Yaml instance reused for all files; safe here because the whole
+        // method is synchronized and the instance is used sequentially
+        Yaml yaml = new Yaml();
+        configContentMap.forEach((fileName, content) -> {
+            ESSyncConfig config = yaml.loadAs(content, ESSyncConfig.class);
+            try {
+                config.validate();
+            } catch (Exception e) {
+                throw new RuntimeException("ERROR Config: " + fileName + " " + e.getMessage(), e);
+            }
+            esSyncConfig.put(fileName, config);
+        });
+
+        logger.info("## ES mapping config loaded");
+        return esSyncConfig;
+    }
+}

+ 422 - 0
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/SchemaItem.java

@@ -0,0 +1,422 @@
+package com.alibaba.otter.canal.client.adapter.es.config;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
+
+/**
+ * ES 映射配置视图
+ *
+ * @author rewerma 2018-11-01
+ * @version 1.0.0
+ */
+public class SchemaItem {
+
+    private Map<String, TableItem>                aliasTableItems = new LinkedHashMap<>(); // 别名对应表名
+    private Map<String, FieldItem>                selectFields    = new LinkedHashMap<>(); // 查询字段
+    private String                                sql;
+
+    private volatile Map<String, List<TableItem>> tableItemAliases;
+    private volatile Map<String, List<FieldItem>> columnFields;
+    private volatile Boolean                      allFieldsSimple;
+
+    public void init() {
+        this.getTableItemAliases();
+        this.getColumnFields();
+        this.isAllFieldsSimple();
+        aliasTableItems.values().forEach(tableItem -> {
+            tableItem.getRelationTableFields();
+            tableItem.getRelationSelectFieldItems();
+        });
+    }
+
+    public Map<String, TableItem> getAliasTableItems() {
+        return aliasTableItems;
+    }
+
+    public void setAliasTableItems(Map<String, TableItem> aliasTableItems) {
+        this.aliasTableItems = aliasTableItems;
+    }
+
+    public String getSql() {
+        return sql;
+    }
+
+    public void setSql(String sql) {
+        this.sql = sql;
+    }
+
+    public Map<String, FieldItem> getSelectFields() {
+        return selectFields;
+    }
+
+    public void setSelectFields(Map<String, FieldItem> selectFields) {
+        this.selectFields = selectFields;
+    }
+
+    public Map<String, List<TableItem>> getTableItemAliases() {
+        if (tableItemAliases == null) {
+            synchronized (SchemaItem.class) {
+                if (tableItemAliases == null) {
+                    tableItemAliases = new LinkedHashMap<>();
+                    aliasTableItems.forEach((alias, tableItem) -> {
+                        List<TableItem> aliases = tableItemAliases
+                            .computeIfAbsent(tableItem.getTableName().toLowerCase(), k -> new ArrayList<>());
+                        aliases.add(tableItem);
+                    });
+                }
+            }
+        }
+        return tableItemAliases;
+    }
+
+    public Map<String, List<FieldItem>> getColumnFields() {
+        if (columnFields == null) {
+            synchronized (SchemaItem.class) {
+                if (columnFields == null) {
+                    columnFields = new LinkedHashMap<>();
+                    getSelectFields()
+                        .forEach((fieldName, fieldItem) -> fieldItem.getColumnItems().forEach(columnItem -> {
+                            TableItem tableItem = getAliasTableItems().get(columnItem.getOwner());
+                            // if (!tableItem.isSubQuery()) {
+                            List<FieldItem> fieldItems = columnFields.computeIfAbsent(
+                                columnItem.getOwner() + "." + columnItem.getColumnName(),
+                                k -> new ArrayList<>());
+                            fieldItems.add(fieldItem);
+                            // } else {
+                            // tableItem.getSubQueryFields().forEach(subQueryField -> {
+                            // List<FieldItem> fieldItems = columnFields.computeIfAbsent(
+                            // columnItem.getOwner() + "." + subQueryField.getColumn().getColumnName(),
+                            // k -> new ArrayList<>());
+                            // fieldItems.add(fieldItem);
+                            // });
+                            // }
+                        }));
+                }
+            }
+        }
+        return columnFields;
+    }
+
+    public boolean isAllFieldsSimple() {
+        if (allFieldsSimple == null) {
+            synchronized (SchemaItem.class) {
+                if (allFieldsSimple == null) {
+                    allFieldsSimple = true;
+
+                    for (FieldItem fieldItem : getSelectFields().values()) {
+                        if (fieldItem.isMethod() || fieldItem.isBinaryOp()) {
+                            allFieldsSimple = false;
+                            break;
+                        }
+                    }
+                }
+            }
+        }
+
+        return allFieldsSimple;
+    }
+
+    public TableItem getMainTable() {
+        if (!aliasTableItems.isEmpty()) {
+            return aliasTableItems.values().iterator().next();
+        } else {
+            return null;
+        }
+    }
+
+    public FieldItem getIdFieldItem(ESMapping mapping) {
+        if (mapping.get_id() != null) {
+            return getSelectFields().get(mapping.get_id());
+        } else {
+            return getSelectFields().get(mapping.getPk());
+        }
+    }
+
+    public static class TableItem {
+
+        private SchemaItem                               schemaItem;
+
+        private String                                   schema;
+        private String                                   tableName;
+        private String                                   alias;
+        private String                                   subQuerySql;
+        private List<FieldItem>                          subQueryFields = new ArrayList<>();
+        private List<RelationFieldsPair>                 relationFields = new ArrayList<>();
+
+        private boolean                                  main;
+        private boolean                                  subQuery;
+
+        private volatile Map<FieldItem, List<FieldItem>> relationTableFields;               // 当前表关联条件字段对应主表查询字段
+        private volatile List<FieldItem>                 relationSelectFieldItems;          // 子表所在主表的查询字段
+
+        public TableItem(SchemaItem schemaItem){
+            this.schemaItem = schemaItem;
+        }
+
+        public SchemaItem getSchemaItem() {
+            return schemaItem;
+        }
+
+        public void setSchemaItem(SchemaItem schemaItem) {
+            this.schemaItem = schemaItem;
+        }
+
+        public String getSchema() {
+            return schema;
+        }
+
+        public void setSchema(String schema) {
+            this.schema = schema;
+        }
+
+        public String getTableName() {
+            return tableName;
+        }
+
+        public void setTableName(String tableName) {
+            this.tableName = tableName;
+        }
+
+        public String getAlias() {
+            return alias;
+        }
+
+        public void setAlias(String alias) {
+            this.alias = alias;
+        }
+
+        public String getSubQuerySql() {
+            return subQuerySql;
+        }
+
+        public void setSubQuerySql(String subQuerySql) {
+            this.subQuerySql = subQuerySql;
+        }
+
+        public boolean isMain() {
+            return main;
+        }
+
+        public void setMain(boolean main) {
+            this.main = main;
+        }
+
+        public boolean isSubQuery() {
+            return subQuery;
+        }
+
+        public void setSubQuery(boolean subQuery) {
+            this.subQuery = subQuery;
+        }
+
+        public List<FieldItem> getSubQueryFields() {
+            return subQueryFields;
+        }
+
+        public void setSubQueryFields(List<FieldItem> subQueryFields) {
+            this.subQueryFields = subQueryFields;
+        }
+
+        public List<RelationFieldsPair> getRelationFields() {
+            return relationFields;
+        }
+
+        public void setRelationFields(List<RelationFieldsPair> relationFields) {
+            this.relationFields = relationFields;
+        }
+
+        public Map<FieldItem, List<FieldItem>> getRelationTableFields() {
+            if (relationTableFields == null) {
+                synchronized (SchemaItem.class) {
+                    if (relationTableFields == null) {
+                        relationTableFields = new LinkedHashMap<>();
+
+                        getRelationFields().forEach(relationFieldsPair -> {
+                            FieldItem leftFieldItem = relationFieldsPair.getLeftFieldItem();
+                            FieldItem rightFieldItem = relationFieldsPair.getRightFieldItem();
+                            FieldItem currentTableRelField = null;
+                            if (getAlias().equals(leftFieldItem.getOwner())) {
+                                currentTableRelField = leftFieldItem;
+                            } else if (getAlias().equals(rightFieldItem.getOwner())) {
+                                currentTableRelField = rightFieldItem;
+                            }
+
+                            if (currentTableRelField != null) {
+                                List<FieldItem> selectFieldItem = getSchemaItem().getColumnFields()
+                                    .get(leftFieldItem.getOwner() + "." + leftFieldItem.getColumn().getColumnName());
+                                if (selectFieldItem != null && !selectFieldItem.isEmpty()) {
+                                    relationTableFields.put(currentTableRelField, selectFieldItem);
+                                } else {
+                                    selectFieldItem = getSchemaItem().getColumnFields()
+                                        .get(rightFieldItem.getOwner() + "."
+                                             + rightFieldItem.getColumn().getColumnName());
+                                    if (selectFieldItem != null && !selectFieldItem.isEmpty()) {
+                                        relationTableFields.put(currentTableRelField, selectFieldItem);
+                                    } else {
+                                        throw new UnsupportedOperationException(
+                                            "Relation condition column must in select columns.");
+                                    }
+                                }
+                            }
+                        });
+                    }
+                }
+            }
+            return relationTableFields;
+        }
+
+        public List<FieldItem> getRelationSelectFieldItems() {
+            if (relationSelectFieldItems == null) {
+                synchronized (SchemaItem.class) {
+                    if (relationSelectFieldItems == null) {
+                        relationSelectFieldItems = new ArrayList<>();
+                        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
+                            if (fieldItem.getOwners().contains(getAlias())) {
+                                relationSelectFieldItems.add(fieldItem);
+                            }
+                        }
+                    }
+                }
+            }
+            return relationSelectFieldItems;
+        }
+    }
+
+    public static class RelationFieldsPair {
+
+        private FieldItem leftFieldItem;
+        private FieldItem rightFieldItem;
+
+        public RelationFieldsPair(FieldItem leftFieldItem, FieldItem rightFieldItem){
+            this.leftFieldItem = leftFieldItem;
+            this.rightFieldItem = rightFieldItem;
+        }
+
+        public FieldItem getLeftFieldItem() {
+            return leftFieldItem;
+        }
+
+        public void setLeftFieldItem(FieldItem leftFieldItem) {
+            this.leftFieldItem = leftFieldItem;
+        }
+
+        public FieldItem getRightFieldItem() {
+            return rightFieldItem;
+        }
+
+        public void setRightFieldItem(FieldItem rightFieldItem) {
+            this.rightFieldItem = rightFieldItem;
+        }
+    }
+
+    public static class FieldItem {
+
+        private String           fieldName;
+        private List<ColumnItem> columnItems = new ArrayList<>();
+        private List<String>     owners      = new ArrayList<>();
+
+        private boolean          method;
+        private boolean          binaryOp;
+
+        public String getFieldName() {
+            return fieldName;
+        }
+
+        public void setFieldName(String fieldName) {
+            this.fieldName = fieldName;
+        }
+
+        public List<ColumnItem> getColumnItems() {
+            return columnItems;
+        }
+
+        public void setColumnItems(List<ColumnItem> columnItems) {
+            this.columnItems = columnItems;
+        }
+
+        public boolean isMethod() {
+            return method;
+        }
+
+        public void setMethod(boolean method) {
+            this.method = method;
+        }
+
+        public boolean isBinaryOp() {
+            return binaryOp;
+        }
+
+        public void setBinaryOp(boolean binaryOp) {
+            this.binaryOp = binaryOp;
+        }
+
+        public List<String> getOwners() {
+            return owners;
+        }
+
+        public void setOwners(List<String> owners) {
+            this.owners = owners;
+        }
+
+        public void addColumn(ColumnItem columnItem) {
+            columnItems.add(columnItem);
+        }
+
+        public ColumnItem getColumn() {
+            if (!columnItems.isEmpty()) {
+                return columnItems.get(0);
+            } else {
+                return null;
+            }
+        }
+
+        public String getOwner() {
+            if (!owners.isEmpty()) {
+                return owners.get(0);
+            } else {
+                return null;
+            }
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+
+            FieldItem fieldItem = (FieldItem) o;
+
+            return fieldName != null ? fieldName.equals(fieldItem.fieldName) : fieldItem.fieldName == null;
+        }
+
+        @Override
+        public int hashCode() {
+            return fieldName != null ? fieldName.hashCode() : 0;
+        }
+    }
+
+    public static class ColumnItem {
+
+        private String owner;
+        private String columnName;
+
+        public String getOwner() {
+            return owner;
+        }
+
+        public void setOwner(String owner) {
+            this.owner = owner;
+        }
+
+        public String getColumnName() {
+            return columnName;
+        }
+
+        public void setColumnName(String columnName) {
+            this.columnName = columnName;
+        }
+    }
+}

+ 210 - 0
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/config/SqlParser.java

@@ -0,0 +1,210 @@
+package com.alibaba.otter.canal.client.adapter.es.config;
+
+import static com.alibaba.fastsql.sql.ast.expr.SQLBinaryOperator.BooleanAnd;
+import static com.alibaba.fastsql.sql.ast.expr.SQLBinaryOperator.Equality;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import com.alibaba.fastsql.sql.SQLUtils;
+import com.alibaba.fastsql.sql.ast.SQLExpr;
+import com.alibaba.fastsql.sql.ast.expr.SQLBinaryOpExpr;
+import com.alibaba.fastsql.sql.ast.expr.SQLIdentifierExpr;
+import com.alibaba.fastsql.sql.ast.expr.SQLMethodInvokeExpr;
+import com.alibaba.fastsql.sql.ast.expr.SQLPropertyExpr;
+import com.alibaba.fastsql.sql.ast.statement.*;
+import com.alibaba.fastsql.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock;
+import com.alibaba.fastsql.sql.dialect.mysql.parser.MySqlStatementParser;
+import com.alibaba.fastsql.sql.parser.ParserException;
+import com.alibaba.fastsql.sql.parser.SQLStatementParser;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.ColumnItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.FieldItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.RelationFieldsPair;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.TableItem;
+
+/**
+ * ES同步指定sql格式解析
+ * 
+ * @author rewerma 2018-10-26 下午03:45:49
+ * @version 1.0.0
+ */
+public class SqlParser {
+
+    /**
+     * 解析sql
+     * 
+     * @param sql sql
+     * @return 视图对象
+     */
+    public static SchemaItem parse(String sql) {
+        try {
+            SQLStatementParser parser = new MySqlStatementParser(sql);
+            SQLSelectStatement statement = (SQLSelectStatement) parser.parseStatement();
+            MySqlSelectQueryBlock sqlSelectQueryBlock = (MySqlSelectQueryBlock) statement.getSelect().getQuery();
+
+            SchemaItem schemaItem = new SchemaItem();
+            schemaItem.setSql(SQLUtils.toMySqlString(sqlSelectQueryBlock));
+            SQLTableSource sqlTableSource = sqlSelectQueryBlock.getFrom();
+            List<TableItem> tableItems = new ArrayList<>();
+            SqlParser.visitSelectTable(schemaItem, sqlTableSource, tableItems, null);
+            tableItems.forEach(tableItem -> schemaItem.getAliasTableItems().put(tableItem.getAlias(), tableItem));
+
+            List<FieldItem> fieldItems = collectSelectQueryFields(sqlSelectQueryBlock);
+            fieldItems.forEach(fieldItem -> schemaItem.getSelectFields().put(fieldItem.getFieldName(), fieldItem));
+
+            schemaItem.init();
+
+            if (schemaItem.getAliasTableItems().isEmpty() || schemaItem.getSelectFields().isEmpty()) {
+                throw new ParserException("Parse sql error");
+            }
+            return schemaItem;
+        } catch (Exception e) {
+            throw new ParserException();
+        }
+    }
+
+    /**
+     * 归集字段
+     * 
+     * @param sqlSelectQueryBlock sqlSelectQueryBlock
+     * @return 字段属性列表
+     */
+    private static List<FieldItem> collectSelectQueryFields(MySqlSelectQueryBlock sqlSelectQueryBlock) {
+        return sqlSelectQueryBlock.getSelectList().stream().map(selectItem -> {
+            FieldItem fieldItem = new FieldItem();
+            fieldItem.setFieldName(selectItem.getAlias());
+            visitColumn(selectItem.getExpr(), fieldItem);
+            return fieldItem;
+        }).collect(Collectors.toList());
+    }
+
+    /**
+     * 解析字段
+     * 
+     * @param expr sql expr
+     * @param fieldItem 字段属性
+     */
+    private static void visitColumn(SQLExpr expr, FieldItem fieldItem) {
+        if (expr instanceof SQLIdentifierExpr) {
+            // 无owner
+            SQLIdentifierExpr identifierExpr = (SQLIdentifierExpr) expr;
+            if (fieldItem.getFieldName() == null) {
+                fieldItem.setFieldName(identifierExpr.getName());
+            }
+            ColumnItem columnItem = new ColumnItem();
+            columnItem.setColumnName(identifierExpr.getName());
+            fieldItem.getOwners().add(null);
+            fieldItem.addColumn(columnItem);
+        } else if (expr instanceof SQLPropertyExpr) {
+            // 有owner
+            SQLPropertyExpr sqlPropertyExpr = (SQLPropertyExpr) expr;
+            if (fieldItem.getFieldName() == null) {
+                fieldItem.setFieldName(sqlPropertyExpr.getName());
+            }
+            fieldItem.getOwners().add(sqlPropertyExpr.getOwnernName());
+            ColumnItem columnItem = new ColumnItem();
+            columnItem.setColumnName(sqlPropertyExpr.getName());
+            columnItem.setOwner(sqlPropertyExpr.getOwnernName());
+            fieldItem.addColumn(columnItem);
+        } else if (expr instanceof SQLMethodInvokeExpr) {
+            SQLMethodInvokeExpr methodInvokeExpr = (SQLMethodInvokeExpr) expr;
+            fieldItem.setMethod(true);
+            for (SQLExpr sqlExpr : methodInvokeExpr.getArguments()) {
+                visitColumn(sqlExpr, fieldItem);
+            }
+        } else if (expr instanceof SQLBinaryOpExpr) {
+            SQLBinaryOpExpr sqlBinaryOpExpr = (SQLBinaryOpExpr) expr;
+            fieldItem.setBinaryOp(true);
+            visitColumn(sqlBinaryOpExpr.getLeft(), fieldItem);
+            visitColumn(sqlBinaryOpExpr.getRight(), fieldItem);
+        }
+    }
+
+    /**
+     * 解析表
+     * 
+     * @param schemaItem 视图对象
+     * @param sqlTableSource sqlTableSource
+     * @param tableItems 表对象列表
+     * @param tableItemTmp 表对象(临时)
+     */
+    private static void visitSelectTable(SchemaItem schemaItem, SQLTableSource sqlTableSource,
+                                         List<TableItem> tableItems, TableItem tableItemTmp) {
+        if (sqlTableSource instanceof SQLExprTableSource) {
+            SQLExprTableSource sqlExprTableSource = (SQLExprTableSource) sqlTableSource;
+            TableItem tableItem;
+            if (tableItemTmp != null) {
+                tableItem = tableItemTmp;
+            } else {
+                tableItem = new TableItem(schemaItem);
+            }
+            tableItem.setSchema(sqlExprTableSource.getSchema());
+            tableItem.setTableName(sqlExprTableSource.getTableName());
+            if (tableItem.getAlias() == null) {
+                tableItem.setAlias(sqlExprTableSource.getAlias());
+            }
+            if (tableItems.isEmpty()) {
+                // 第一张表为主表
+                tableItem.setMain(true);
+            }
+            tableItems.add(tableItem);
+        } else if (sqlTableSource instanceof SQLJoinTableSource) {
+            SQLJoinTableSource sqlJoinTableSource = (SQLJoinTableSource) sqlTableSource;
+            SQLTableSource leftTableSource = sqlJoinTableSource.getLeft();
+            visitSelectTable(schemaItem, leftTableSource, tableItems, null);
+            SQLTableSource rightTableSource = sqlJoinTableSource.getRight();
+            TableItem rightTableItem = new TableItem(schemaItem);
+            // 解析on条件字段
+            visitOnCondition(sqlJoinTableSource.getCondition(), rightTableItem);
+            visitSelectTable(schemaItem, rightTableSource, tableItems, rightTableItem);
+
+        } else if (sqlTableSource instanceof SQLSubqueryTableSource) {
+            SQLSubqueryTableSource subQueryTableSource = (SQLSubqueryTableSource) sqlTableSource;
+            MySqlSelectQueryBlock sqlSelectQuery = (MySqlSelectQueryBlock) subQueryTableSource.getSelect().getQuery();
+            TableItem tableItem;
+            if (tableItemTmp != null) {
+                tableItem = tableItemTmp;
+            } else {
+                tableItem = new TableItem(schemaItem);
+            }
+            tableItem.setAlias(subQueryTableSource.getAlias());
+            tableItem.setSubQuerySql(SQLUtils.toMySqlString(sqlSelectQuery));
+            tableItem.setSubQuery(true);
+            tableItem.setSubQueryFields(collectSelectQueryFields(sqlSelectQuery));
+            visitSelectTable(schemaItem, sqlSelectQuery.getFrom(), tableItems, tableItem);
+        }
+    }
+
+    /**
+     * 解析on条件
+     * 
+     * @param expr sql expr
+     * @param tableItem 表对象
+     */
+    private static void visitOnCondition(SQLExpr expr, TableItem tableItem) {
+        if (!(expr instanceof SQLBinaryOpExpr)) {
+            throw new UnsupportedOperationException();
+        }
+        SQLBinaryOpExpr sqlBinaryOpExpr = (SQLBinaryOpExpr) expr;
+        if (sqlBinaryOpExpr.getOperator() == BooleanAnd) {
+            visitOnCondition(sqlBinaryOpExpr.getLeft(), tableItem);
+            visitOnCondition(sqlBinaryOpExpr.getRight(), tableItem);
+        } else if (sqlBinaryOpExpr.getOperator() == Equality) {
+            FieldItem leftFieldItem = new FieldItem();
+            visitColumn(sqlBinaryOpExpr.getLeft(), leftFieldItem);
+            if (leftFieldItem.getColumnItems().size() != 1 || leftFieldItem.isMethod() || leftFieldItem.isBinaryOp()) {
+                throw new UnsupportedOperationException("Unsupported for complex of on-condition");
+            }
+            FieldItem rightFieldItem = new FieldItem();
+            visitColumn(sqlBinaryOpExpr.getRight(), rightFieldItem);
+            if (rightFieldItem.getColumnItems().size() != 1 || rightFieldItem.isMethod()
+                || rightFieldItem.isBinaryOp()) {
+                throw new UnsupportedOperationException("Unsupported for complex of on-condition");
+            }
+            tableItem.getRelationFields().add(new RelationFieldsPair(leftFieldItem, rightFieldItem));
+        } else {
+            throw new UnsupportedOperationException("Unsupported for complex of on-condition");
+        }
+    }
+}

+ 151 - 0
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/monitor/ESConfigMonitor.java

@@ -0,0 +1,151 @@
+package com.alibaba.otter.canal.client.adapter.es.monitor;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.io.filefilter.FileFilterUtils;
+import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
+import org.apache.commons.io.monitor.FileAlterationMonitor;
+import org.apache.commons.io.monitor.FileAlterationObserver;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.otter.canal.client.adapter.es.ESAdapter;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SqlParser;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+
+/**
+ * Watches the "es" adapter conf directory and hot-reloads yml mapping configs
+ * into the running {@link ESAdapter} (add / change / delete).
+ */
+public class ESConfigMonitor {
+
+    private static final Logger   logger      = LoggerFactory.getLogger(ESConfigMonitor.class);
+
+    private static final String   adapterName = "es";
+
+    private ESAdapter             esAdapter;
+
+    private FileAlterationMonitor fileMonitor;
+
+    /**
+     * Start monitoring the config directory, polling every 3 seconds for *.yml files.
+     *
+     * @param esAdapter the adapter whose config caches are kept in sync
+     */
+    public void init(ESAdapter esAdapter) {
+        this.esAdapter = esAdapter;
+        File confDir = Util.getConfDirPath(adapterName);
+        try {
+            FileAlterationObserver observer = new FileAlterationObserver(confDir,
+                FileFilterUtils.and(FileFilterUtils.fileFileFilter(), FileFilterUtils.suffixFileFilter("yml")));
+            FileListener listener = new FileListener();
+            observer.addListener(listener);
+            fileMonitor = new FileAlterationMonitor(3000, observer);
+            fileMonitor.start();
+
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * Stop monitoring. Safe to call even when {@link #init} failed before the
+     * monitor was created (previously this would throw a NullPointerException).
+     */
+    public void destroy() {
+        if (fileMonitor == null) {
+            return;
+        }
+        try {
+            fileMonitor.stop();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    private class FileListener extends FileAlterationListenerAdaptor {
+
+        @Override
+        public void onFileCreate(File file) {
+            super.onFileCreate(file);
+            try {
+                // load the newly added config file
+                String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName());
+                ESSyncConfig config = new Yaml().loadAs(configContent, ESSyncConfig.class);
+                config.validate();
+                addConfigToCache(file, config);
+
+                logger.info("Add a new es mapping config: {} to canal adapter", file.getName());
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        @Override
+        public void onFileChange(File file) {
+            super.onFileChange(file);
+
+            try {
+                if (esAdapter.getEsSyncConfig().containsKey(file.getName())) {
+                    // reload the changed config file
+                    String configContent = MappingConfigsLoader
+                        .loadConfig(adapterName + File.separator + file.getName());
+                    ESSyncConfig config = new Yaml().loadAs(configContent, ESSyncConfig.class);
+                    config.validate();
+                    // the outer containsKey check already guarantees the old entry exists,
+                    // so the previous redundant re-check was dropped
+                    deleteConfigFromCache(file);
+                    addConfigToCache(file, config);
+
+                    logger.info("Change a es mapping config: {} of canal adapter", file.getName());
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        @Override
+        public void onFileDelete(File file) {
+            super.onFileDelete(file);
+
+            try {
+                if (esAdapter.getEsSyncConfig().containsKey(file.getName())) {
+                    deleteConfigFromCache(file);
+
+                    logger.info("Delete a es mapping config: {} of canal adapter", file.getName());
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        /** Parses the mapping sql and registers the config under every db table it references. */
+        private void addConfigToCache(File file, ESSyncConfig config) {
+            esAdapter.getEsSyncConfig().put(file.getName(), config);
+
+            SchemaItem schemaItem = SqlParser.parse(config.getEsMapping().getSql());
+            config.getEsMapping().setSchemaItem(schemaItem);
+
+            DruidDataSource dataSource = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+            if (dataSource == null || dataSource.getUrl() == null) {
+                throw new RuntimeException("No data source found: " + config.getDataSourceKey());
+            }
+            // extract the database name from the jdbc url, e.g. jdbc:mysql://host/db?... -> db
+            Pattern pattern = Pattern.compile(".*:(.*)://.*/(.*)\\?.*$");
+            Matcher matcher = pattern.matcher(dataSource.getUrl());
+            if (!matcher.find()) {
+                throw new RuntimeException("Not found the schema of jdbc-url: " + config.getDataSourceKey());
+            }
+            String schema = matcher.group(2);
+
+            schemaItem.getAliasTableItems().values().forEach(tableItem -> {
+                Map<String, ESSyncConfig> esSyncConfigMap = esAdapter.getDbTableEsSyncConfig()
+                    .computeIfAbsent(schema + "-" + tableItem.getTableName(), k -> new HashMap<>());
+                esSyncConfigMap.put(file.getName(), config);
+            });
+        }
+
+        /** Removes the config from both the name-keyed and the table-keyed caches. */
+        private void deleteConfigFromCache(File file) {
+            esAdapter.getEsSyncConfig().remove(file.getName());
+            for (Map<String, ESSyncConfig> configMap : esAdapter.getDbTableEsSyncConfig().values()) {
+                if (configMap != null) {
+                    configMap.remove(file.getName());
+                }
+            }
+        }
+    }
+}

+ 286 - 0
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/service/ESEtlService.java

@@ -0,0 +1,286 @@
+package com.alibaba.otter.canal.client.adapter.es.service;
+
+import java.util.*;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.sql.DataSource;
+
+import org.elasticsearch.action.bulk.BulkItemResponse;
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.transport.TransportClient;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.SearchHit;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.FieldItem;
+import com.alibaba.otter.canal.client.adapter.es.support.ESSyncUtil;
+import com.alibaba.otter.canal.client.adapter.es.support.ESTemplate;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+import com.alibaba.otter.canal.client.adapter.support.EtlResult;
+import com.google.common.base.Joiner;
+
+/**
+ * ES ETL Service: full (re-)import of rows from the source database into an ES index.
+ *
+ * @author rewerma 2018-11-01
+ * @version 1.0.0
+ */
+public class ESEtlService {
+
+    private static final Logger logger = LoggerFactory.getLogger(ESEtlService.class);
+
+    private TransportClient     transportClient;
+    private ESTemplate          esTemplate;
+    private ESSyncConfig        config;
+
+    public ESEtlService(TransportClient transportClient, ESSyncConfig config){
+        this.transportClient = transportClient;
+        this.esTemplate = new ESTemplate(transportClient);
+        this.config = config;
+    }
+
+    /**
+     * Run the full import.
+     *
+     * @param params positional values substituted into the mapping's etlCondition ({0}, {1}, ...)
+     * @param bulk   true: count rows first and parallelize large imports; false: single pass
+     * @return the import result (row count or accumulated error messages)
+     */
+    public EtlResult importData(List<String> params, boolean bulk) {
+        EtlResult etlResult = new EtlResult();
+        AtomicLong impCount = new AtomicLong();
+        List<String> errMsg = new ArrayList<>();
+        String esIndex = "";
+        if (config == null) {
+            // message typo fixed (was "esSycnCofnig")
+            logger.warn("esSyncConfig is null, etl go end !");
+            etlResult.setErrorMessage("esSyncConfig is null, etl go end !");
+            return etlResult;
+        }
+
+        ESMapping mapping = config.getEsMapping();
+
+        esIndex = mapping.get_index();
+        DruidDataSource dataSource = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+        if (dataSource == null || dataSource.getUrl() == null) {
+            // fail fast with a clear message instead of an NPE on dataSource.getUrl() below
+            throw new RuntimeException("No data source found: " + config.getDataSourceKey());
+        }
+        // extract the database name from the jdbc url, e.g. jdbc:mysql://host/db?... -> db
+        Pattern pattern = Pattern.compile(".*:(.*)://.*/(.*)\\?.*$");
+        Matcher matcher = pattern.matcher(dataSource.getUrl());
+        if (!matcher.find()) {
+            throw new RuntimeException("Not found the schema of jdbc-url: " + config.getDataSourceKey());
+        }
+        String schema = matcher.group(2);
+
+        logger.info("etl from db: {},  to es index: {}", schema, esIndex);
+        long start = System.currentTimeMillis();
+        try {
+            String sql = mapping.getSql();
+
+            // append the optional etl condition with its placeholders substituted
+            if (mapping.getEtlCondition() != null && params != null) {
+                String etlCondition = mapping.getEtlCondition();
+                int size = params.size();
+                for (int i = 0; i < size; i++) {
+                    etlCondition = etlCondition.replace("{" + i + "}", params.get(i));
+                }
+
+                sql += " " + etlCondition;
+            }
+
+            if (logger.isDebugEnabled()) {
+                // log the final sql including the appended condition (was mapping.getSql())
+                logger.debug("etl sql : {}", sql);
+            }
+
+            if (bulk) {
+                // total row count, used to decide whether to parallelize
+                String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
+                long cnt = (Long) ESSyncUtil.sqlRS(dataSource, countSql, rs -> {
+                    Long count = null;
+                    try {
+                        if (rs.next()) {
+                            count = ((Number) rs.getObject(1)).longValue();
+                        }
+                    } catch (Exception e) {
+                        logger.error(e.getMessage(), e);
+                    }
+                    return count == null ? 0L : count;
+                });
+
+                // use multiple threads when there are at least 10000 rows
+                if (cnt >= 10000) {
+                    int threadCount = 3; // TODO read from config, default 3
+                    long perThreadCnt = cnt / threadCount;
+                    ExecutorService executor = Executors.newFixedThreadPool(threadCount);
+                    try {
+                        List<Future<Boolean>> futures = new ArrayList<>(threadCount);
+                        for (int i = 0; i < threadCount; i++) {
+                            long offset = i * perThreadCnt;
+                            // the last thread picks up the division remainder
+                            long size = (i == threadCount - 1) ? cnt : perThreadCnt;
+                            String sqlFinal = sql + " LIMIT " + offset + "," + size;
+                            Future<Boolean> future = executor
+                                .submit(() -> executeSqlImport(dataSource, sqlFinal, mapping, impCount, errMsg));
+                            futures.add(future);
+                        }
+
+                        for (Future<Boolean> future : futures) {
+                            future.get();
+                        }
+                    } finally {
+                        // always release the pool, even when a worker failed
+                        executor.shutdown();
+                    }
+                } else {
+                    executeSqlImport(dataSource, sql, mapping, impCount, errMsg);
+                }
+            } else {
+                logger.info("自动ETL,无需统计记录总条数,直接进行ETL, index: {}", esIndex);
+                executeSqlImport(dataSource, sql, mapping, impCount, errMsg);
+            }
+
+            logger.info("数据全量导入完成,一共导入 {} 条数据, 耗时: {}", impCount.get(), System.currentTimeMillis() - start);
+            etlResult.setResultMessage("导入ES索引 " + esIndex + " 数据:" + impCount.get() + " 条");
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            errMsg.add(esIndex + " etl failed! ==>" + e.getMessage());
+        }
+        if (errMsg.isEmpty()) {
+            etlResult.setSucceeded(true);
+        } else {
+            etlResult.setErrorMessage(Joiner.on("\n").join(errMsg));
+        }
+        return etlResult;
+    }
+
+    /**
+     * Inspect a bulk response: NOT_FOUND failures are only logged, any other
+     * failure aborts the import.
+     *
+     * @param bulkResponse the response to inspect
+     * @param hasParent    whether the mapping declares a parent; currently unused,
+     *                     kept for signature stability — TODO confirm intended use
+     */
+    private void processFailBulkResponse(BulkResponse bulkResponse, boolean hasParent) {
+        for (BulkItemResponse response : bulkResponse.getItems()) {
+            if (!response.isFailed()) {
+                continue;
+            }
+
+            if (response.getFailure().getStatus() == RestStatus.NOT_FOUND) {
+                logger.warn(response.getFailureMessage());
+            } else {
+                logger.error("全量导入数据有误 {}", response.getFailureMessage());
+                throw new RuntimeException("全量数据 etl 异常: " + response.getFailureMessage());
+            }
+        }
+    }
+
+    /**
+     * Stream the query result and index each row into ES in commit batches.
+     *
+     * @return true on success, false if the import of this sql failed
+     */
+    private boolean executeSqlImport(DataSource ds, String sql, ESMapping mapping, AtomicLong impCount,
+                                     List<String> errMsg) {
+        try {
+            ESSyncUtil.sqlRS(ds, sql, rs -> {
+                int count = 0;
+                try {
+                    BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+
+                    long batchBegin = System.currentTimeMillis();
+                    while (rs.next()) {
+                        Map<String, Object> esFieldData = new LinkedHashMap<>();
+                        for (FieldItem fieldItem : mapping.getSchemaItem().getSelectFields().values()) {
+
+                            // the _id field itself is not written into the document body
+                            if (fieldItem.getFieldName().equals(mapping.get_id())) {
+                                continue;
+                            }
+
+                            String fieldName = fieldItem.getFieldName();
+                            if (mapping.getSkips().contains(fieldName)) {
+                                continue;
+                            }
+
+                            Object val = esTemplate.getValFromRS(mapping, rs, fieldName, fieldName);
+                            esFieldData.put(fieldName, val);
+                        }
+                        Object idVal = null;
+                        if (mapping.get_id() != null) {
+                            idVal = rs.getObject(mapping.get_id());
+                        }
+
+                        if (idVal != null) {
+                            if (mapping.getParent() == null) {
+                                bulkRequestBuilder.add(transportClient
+                                    .prepareIndex(mapping.get_index(), mapping.get_type(), idVal.toString())
+                                    .setSource(esFieldData));
+                            } else {
+                                // parent/child mappings are not handled here
+                            }
+                        } else {
+                            // no _id configured: de-duplicate by pk, then index without an explicit id
+                            idVal = rs.getObject(mapping.getPk());
+                            if (mapping.getParent() == null) {
+                                SearchResponse response = transportClient.prepareSearch(mapping.get_index())
+                                    .setTypes(mapping.get_type())
+                                    .setQuery(QueryBuilders.termQuery(mapping.getPk(), idVal))
+                                    .get();
+                                for (SearchHit hit : response.getHits()) {
+                                    bulkRequestBuilder.add(transportClient
+                                        .prepareDelete(mapping.get_index(), mapping.get_type(), hit.getId()));
+                                }
+
+                                bulkRequestBuilder
+                                    .add(transportClient.prepareIndex(mapping.get_index(), mapping.get_type())
+                                        .setSource(esFieldData));
+                            } else {
+                                // parent/child mappings are not handled here
+                            }
+                        }
+
+                        // flush when a full commit batch has accumulated
+                        if (bulkRequestBuilder.numberOfActions() % mapping.getCommitBatch() == 0
+                            && bulkRequestBuilder.numberOfActions() > 0) {
+                            long esBatchBegin = System.currentTimeMillis();
+                            BulkResponse rp = bulkRequestBuilder.execute().actionGet();
+                            if (rp.hasFailures()) {
+                                this.processFailBulkResponse(rp, Objects.nonNull(mapping.getParent()));
+                            }
+
+                            if (logger.isDebugEnabled()) {
+                                logger.debug("全量数据批量导入批次耗时: {}, es执行时间: {}, 批次大小: {}, index; {}",
+                                    (System.currentTimeMillis() - batchBegin),
+                                    (System.currentTimeMillis() - esBatchBegin),
+                                    bulkRequestBuilder.numberOfActions(),
+                                    mapping.get_index());
+                            }
+                            batchBegin = System.currentTimeMillis();
+                            bulkRequestBuilder = transportClient.prepareBulk();
+                        }
+                        count++;
+                        impCount.incrementAndGet();
+                    }
+
+                    // flush the trailing partial batch
+                    if (bulkRequestBuilder.numberOfActions() > 0) {
+                        long esBatchBegin = System.currentTimeMillis();
+                        BulkResponse rp = bulkRequestBuilder.execute().actionGet();
+                        if (rp.hasFailures()) {
+                            this.processFailBulkResponse(rp, Objects.nonNull(mapping.getParent()));
+                        }
+                        if (logger.isDebugEnabled()) {
+                            logger.debug("全量数据批量导入最后批次耗时: {}, es执行时间: {}, 批次大小: {}, index; {}",
+                                (System.currentTimeMillis() - batchBegin),
+                                (System.currentTimeMillis() - esBatchBegin),
+                                bulkRequestBuilder.numberOfActions(),
+                                mapping.get_index());
+                        }
+                    }
+                } catch (Exception e) {
+                    logger.error(e.getMessage(), e);
+                    errMsg.add(mapping.get_index() + " etl failed! ==>" + e.getMessage());
+                    throw new RuntimeException(e);
+                }
+                return count;
+            });
+
+            return true;
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            return false;
+        }
+    }
+}

+ 862 - 0
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/service/ESSyncService.java

@@ -0,0 +1,862 @@
+package com.alibaba.otter.canal.client.adapter.es.service;
+
+import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.sql.DataSource;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.serializer.SerializerFeature;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.ColumnItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.FieldItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.TableItem;
+import com.alibaba.otter.canal.client.adapter.es.support.ESSyncUtil;
+import com.alibaba.otter.canal.client.adapter.es.support.ESTemplate;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+
+/**
+ * ES 同步 Service
+ *
+ * @author rewerma 2018-11-01
+ * @version 1.0.0
+ */
+public class ESSyncService {
+
    // Class-level SLF4J logger shared by all sync operations.
    private static Logger logger = LoggerFactory.getLogger(ESSyncService.class);

    // Template encapsulating the low-level Elasticsearch read/write operations
    // (insert / delete / updateByQuery / value extraction).
    private ESTemplate    esTemplate;

    // Constructor injection of the ES template used by every sync path.
    public ESSyncService(ESTemplate esTemplate){
        this.esTemplate = esTemplate;
    }
+
+    public void sync(Collection<ESSyncConfig> esSyncConfigs, Dml dml) {
+        long begin = System.currentTimeMillis();
+        if (esSyncConfigs != null) {
+            if (logger.isTraceEnabled()) {
+                logger.trace("Destination: {}, database:{}, table:{}, type:{}, effect index count: {}",
+                    dml.getDestination(),
+                    dml.getDatabase(),
+                    dml.getTable(),
+                    dml.getType(),
+                    esSyncConfigs.size());
+            }
+
+            for (ESSyncConfig config : esSyncConfigs) {
+                if (logger.isTraceEnabled()) {
+                    logger.trace("Prepared to sync index: {}, destination: {}",
+                        config.getEsMapping().get_index(),
+                        dml.getDestination());
+                }
+                this.sync(config, dml);
+                if (logger.isTraceEnabled()) {
+                    logger.trace("Sync completed: {}, destination: {}",
+                        config.getEsMapping().get_index(),
+                        dml.getDestination());
+                }
+            }
+            if (logger.isTraceEnabled()) {
+                logger.trace("Sync elapsed time: {} ms, effect index count:{}, destination: {}",
+                    (System.currentTimeMillis() - begin),
+                    esSyncConfigs.size(),
+                    dml.getDestination());
+            }
+            if (logger.isDebugEnabled()) {
+                logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue));
+            }
+        }
+    }
+
+    public void sync(ESSyncConfig config, Dml dml) {
+        try {
+            // 如果是按时间戳定时更新则返回
+            if (config.getEsMapping().isSyncByTimestamp()) {
+                return;
+            }
+
+            long begin = System.currentTimeMillis();
+
+            String type = dml.getType();
+            if (type != null && type.equalsIgnoreCase("INSERT")) {
+                insert(config, dml);
+            } else if (type != null && type.equalsIgnoreCase("UPDATE")) {
+                update(config, dml);
+            } else if (type != null && type.equalsIgnoreCase("DELETE")) {
+                delete(config, dml);
+            }
+
+            if (logger.isTraceEnabled()) {
+                logger.trace("Sync elapsed time: {} ms,destination: {}, es index: {}",
+                    (System.currentTimeMillis() - begin),
+                    dml.getDestination(),
+                    config.getEsMapping().get_index());
+            }
+        } catch (Exception e) {
+            logger.error("sync error, es index: {}, DML : {}", config.getEsMapping().get_index(), dml);
+            logger.error(e.getMessage(), e);
+        }
+    }
+
    /**
     * Handle an INSERT DML event.
     * 
     * @param config ES sync config
     * @param dml DML data
     */
    private void insert(ESSyncConfig config, Dml dml) {
        List<Map<String, Object>> dataList = dml.getData();
        if (dataList == null || dataList.isEmpty()) {
            return;
        }
        SchemaItem schemaItem = config.getEsMapping().getSchemaItem();
        for (Map<String, Object> data : dataList) {
            if (data == null || data.isEmpty()) {
                continue;
            }

            if (schemaItem.getAliasTableItems().size() == 1 && schemaItem.isAllFieldsSimple()) {
                // ------ single table & all fields are simple columns ------
                singleTableSimpleFiledInsert(config, dml, data);
            } else {
                // ------ main table: insert via the mapping's query sql ------
                if (schemaItem.getMainTable().getTableName().equalsIgnoreCase(dml.getTable())) {
                    mainTableInsert(config, dml, data);
                }

                // operations for joined (non-main) tables
                for (TableItem tableItem : schemaItem.getAliasTableItems().values()) {
                    if (tableItem.isMain()) {
                        continue;
                    }
                    if (!tableItem.getTableName().equals(dml.getTable())) {
                        continue;
                    }
                    // are all join-related fields selected in the main query simple columns?
                    boolean allFieldsSimple = true;
                    for (FieldItem fieldItem : tableItem.getRelationSelectFieldItems()) {
                        if (fieldItem.isMethod() || fieldItem.isBinaryOp()) {
                            allFieldsSimple = false;
                            break;
                        }
                    }
                    // all selected fields are simple columns
                    if (allFieldsSimple) {
                        // not a sub-query
                        if (!tableItem.isSubQuery()) {
                            // ------ joined table, simple-field insert ------
                            Map<String, Object> esFieldData = new LinkedHashMap<>();
                            for (FieldItem fieldItem : tableItem.getRelationSelectFieldItems()) {
                                Object value = esTemplate.getValFromData(config.getEsMapping(),
                                    data,
                                    fieldItem.getFieldName(),
                                    fieldItem.getColumn().getColumnName());
                                esFieldData.put(fieldItem.getFieldName(), value);
                            }

                            joinTableSimpleFieldOperation(config, dml, data, tableItem, esFieldData);
                        } else {
                            // ------ joined sub-query table, simple-field insert ------
                            subTableSimpleFieldOperation(config, dml, data, null, tableItem);
                        }
                    } else {
                        // ------ joined table with complex fields: run the whole sql to update es ------
                        wholeSqlOperation(config, dml, data, null, tableItem);
                    }
                }
            }
        }
    }
+
    /**
     * Handle an UPDATE DML event.
     *
     * @param config ES sync config
     * @param dml DML data
     */
    private void update(ESSyncConfig config, Dml dml) {
        List<Map<String, Object>> dataList = dml.getData();
        List<Map<String, Object>> oldList = dml.getOld();
        if (dataList == null || dataList.isEmpty() || oldList == null || oldList.isEmpty()) {
            return;
        }
        SchemaItem schemaItem = config.getEsMapping().getSchemaItem();
        int i = 0;
        for (Map<String, Object> data : dataList) {
            // NOTE(review): assumes oldList is parallel to dataList (same size) — TODO confirm canal guarantees this
            Map<String, Object> old = oldList.get(i);
            if (data == null || data.isEmpty() || old == null || old.isEmpty()) {
                continue;
            }

            if (schemaItem.getAliasTableItems().size() == 1 && schemaItem.isAllFieldsSimple()) {
                // ------ single table & all fields are simple columns ------
                singleTableSimpleFiledUpdate(config, dml, data, old);
            } else {
                // ------ main table: update via the mapping's query sql ------
                if (schemaItem.getMainTable().getTableName().equalsIgnoreCase(dml.getTable())) {
                    ESMapping mapping = config.getEsMapping();
                    String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
                    FieldItem idFieldItem = schemaItem.getSelectFields().get(idFieldName);

                    // is the id field a plain column (no function / binary operation)?
                    boolean idFieldSimple = true;
                    if (idFieldItem.isMethod() || idFieldItem.isBinaryOp()) {
                        idFieldSimple = false;
                    }

                    // are all changed (present in old) fields plain columns?
                    boolean allUpdateFieldSimple = true;
                    out: for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
                        for (ColumnItem columnItem : fieldItem.getColumnItems()) {
                            if (old.containsKey(columnItem.getColumnName())) {
                                if (fieldItem.isMethod() || fieldItem.isBinaryOp()) {
                                    allUpdateFieldSimple = false;
                                    break out;
                                }
                            }
                        }
                    }

                    // Primary key updates are NOT supported!!

                    // check whether any foreign-key column was changed
                    boolean fkChanged = false;
                    for (TableItem tableItem : schemaItem.getAliasTableItems().values()) {
                        if (tableItem.isMain()) {
                            continue;
                        }
                        boolean changed = false;
                        for (List<FieldItem> fieldItems : tableItem.getRelationTableFields().values()) {
                            for (FieldItem fieldItem : fieldItems) {
                                if (old.containsKey(fieldItem.getColumn().getColumnName())) {
                                    fkChanged = true;
                                    changed = true;
                                    break;
                                }
                            }
                        }
                        // if a foreign key changed, force-refresh every select-condition
                        // field of that table by marking its columns as changed in `old`
                        if (changed) {
                            for (FieldItem fieldItem : tableItem.getRelationSelectFieldItems()) {
                                fieldItem.getColumnItems()
                                    .forEach(columnItem -> old.put(columnItem.getColumnName(), null));
                            }
                        }
                    }

                    // if the id and all changed fields are plain columns and no FK changed,
                    // update the document in place; otherwise re-query via sql
                    if (idFieldSimple && allUpdateFieldSimple && !fkChanged) {
                        singleTableSimpleFiledUpdate(config, dml, data, old);
                    } else {
                        mainTableUpdate(config, dml, data, old);
                    }
                }

                // operations for joined (non-main) tables
                for (TableItem tableItem : schemaItem.getAliasTableItems().values()) {
                    if (tableItem.isMain()) {
                        continue;
                    }
                    if (!tableItem.getTableName().equals(dml.getTable())) {
                        continue;
                    }

                    // are all join-related fields selected in the main query simple columns?
                    boolean allFieldsSimple = true;
                    for (FieldItem fieldItem : tableItem.getRelationSelectFieldItems()) {
                        if (fieldItem.isMethod() || fieldItem.isBinaryOp()) {
                            allFieldsSimple = false;
                            break;
                        }
                    }

                    // all selected fields are simple columns
                    if (allFieldsSimple) {
                        // not a sub-query
                        if (!tableItem.isSubQuery()) {
                            // ------ joined table, simple-field update ------
                            Map<String, Object> esFieldData = new LinkedHashMap<>();
                            for (FieldItem fieldItem : tableItem.getRelationSelectFieldItems()) {
                                if (old.containsKey(fieldItem.getColumn().getColumnName())) {
                                    Object value = esTemplate.getValFromData(config.getEsMapping(),
                                        data,
                                        fieldItem.getFieldName(),
                                        fieldItem.getColumn().getColumnName());
                                    esFieldData.put(fieldItem.getFieldName(), value);
                                }
                            }
                            joinTableSimpleFieldOperation(config, dml, data, tableItem, esFieldData);
                        } else {
                            // ------ joined sub-query table, simple-field update ------
                            subTableSimpleFieldOperation(config, dml, data, old, tableItem);
                        }
                    } else {
                        // ------ joined table with complex fields: run the whole sql to update es ------
                        wholeSqlOperation(config, dml, data, old, tableItem);
                    }
                }
            }

            i++;
        }
    }
+
    /**
     * Handle a DELETE DML event.
     *
     * @param config ES sync config
     * @param dml DML data
     */
    private void delete(ESSyncConfig config, Dml dml) {
        List<Map<String, Object>> dataList = dml.getData();
        if (dataList == null || dataList.isEmpty()) {
            return;
        }
        SchemaItem schemaItem = config.getEsMapping().getSchemaItem();

        for (Map<String, Object> data : dataList) {
            if (data == null || data.isEmpty()) {
                continue;
            }

            ESMapping mapping = config.getEsMapping();

            // ------ main table ------
            if (schemaItem.getMainTable().getTableName().equalsIgnoreCase(dml.getTable())) {
                FieldItem idFieldItem = schemaItem.getIdFieldItem(mapping);
                // id is a plain column: delete the ES document directly by id
                if (!idFieldItem.isMethod() && !idFieldItem.isBinaryOp()) {
                    Object idVal = esTemplate.getValFromData(mapping,
                        data,
                        idFieldItem.getFieldName(),
                        idFieldItem.getColumn().getColumnName());

                    if (logger.isTraceEnabled()) {
                        logger.trace("Main table delete es index, destination:{}, table: {}, index: {}, id: {}",
                            config.getDestination(),
                            dml.getTable(),
                            mapping.get_index(),
                            idVal);
                    }
                    boolean result = esTemplate.delete(mapping, idVal);
                    if (!result) {
                        logger.error("Main table delete es index error, destination:{}, table: {}, index: {}, id: {}",
                            config.getDestination(),
                            dml.getTable(),
                            mapping.get_index(),
                            idVal);
                    }
                } else {
                    // ------ id involves a function: query sql to resolve the id, then delete ------
                    mainTableDelete(config, dml, data);
                }
            }

            // operations for joined (non-main) tables
            for (TableItem tableItem : schemaItem.getAliasTableItems().values()) {
                if (tableItem.isMain()) {
                    continue;
                }
                if (!tableItem.getTableName().equals(dml.getTable())) {
                    continue;
                }

                // are all join-related fields selected in the main query simple columns?
                boolean allFieldsSimple = true;
                for (FieldItem fieldItem : tableItem.getRelationSelectFieldItems()) {
                    if (fieldItem.isMethod() || fieldItem.isBinaryOp()) {
                        allFieldsSimple = false;
                        break;
                    }
                }

                // all selected fields are simple columns
                if (allFieldsSimple) {
                    // not a sub-query
                    if (!tableItem.isSubQuery()) {
                        // ------ joined table: null out the simple fields in ES ------
                        Map<String, Object> esFieldData = new LinkedHashMap<>();
                        for (FieldItem fieldItem : tableItem.getRelationSelectFieldItems()) {
                            esFieldData.put(fieldItem.getFieldName(), null);
                        }
                        joinTableSimpleFieldOperation(config, dml, data, tableItem, esFieldData);
                    } else {
                        // ------ joined sub-query table, simple-field update ------
                        subTableSimpleFieldOperation(config, dml, data, null, tableItem);
                    }
                } else {
                    // ------ joined table with complex fields: run the whole sql to update es ------
                    wholeSqlOperation(config, dml, data, null, tableItem);
                }
            }
        }
    }
+
+    /**
+     * 单表简单字段insert
+     *
+     * @param config es配置
+     * @param dml dml信息
+     * @param data 单行dml数据
+     */
+    private void singleTableSimpleFiledInsert(ESSyncConfig config, Dml dml, Map<String, Object> data) {
+        ESMapping mapping = config.getEsMapping();
+        Map<String, Object> esFieldData = new LinkedHashMap<>();
+        Object idVal = esTemplate.getESDataFromDmlData(mapping, data, esFieldData);
+
+        if (logger.isTraceEnabled()) {
+            logger.trace("Single table insert ot es index, destination:{}, table: {}, index: {}, id: {}",
+                config.getDestination(),
+                dml.getTable(),
+                mapping.get_index(),
+                idVal);
+        }
+        boolean result = esTemplate.insert(mapping, idVal, esFieldData);
+        if (!result) {
+            logger.error("Single table insert to es index error, destination:{}, table: {}, index: {}, id: {}",
+                config.getDestination(),
+                dml.getTable(),
+                mapping.get_index(),
+                idVal);
+        }
+    }
+
+    /**
+     * 主表(单表)复杂字段insert
+     * 
+     * @param config es配置
+     * @param dml dml信息
+     * @param data 单行dml数据
+     */
+    private void mainTableInsert(ESSyncConfig config, Dml dml, Map<String, Object> data) {
+        ESMapping mapping = config.getEsMapping();
+        String sql = mapping.getSql();
+        String condition = ESSyncUtil.pkConditionSql(mapping, data);
+        sql = ESSyncUtil.appendCondition(sql, condition);
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+        if (logger.isTraceEnabled()) {
+            logger.trace("Main table insert ot es index by query sql, destination:{}, table: {}, index: {}, sql: {}",
+                config.getDestination(),
+                dml.getTable(),
+                mapping.get_index(),
+                sql.replace("\n", " "));
+        }
+        ESSyncUtil.sqlRS(ds, sql, rs -> {
+            try {
+                while (rs.next()) {
+                    Map<String, Object> esFieldData = new LinkedHashMap<>();
+                    Object idVal = esTemplate.getESDataFromRS(mapping, rs, esFieldData);
+
+                    if (logger.isTraceEnabled()) {
+                        logger.trace(
+                            "Main table insert ot es index by query sql, destination:{}, table: {}, index: {}, id: {}",
+                            config.getDestination(),
+                            dml.getTable(),
+                            mapping.get_index(),
+                            idVal);
+                    }
+                    boolean result = esTemplate.insert(mapping, idVal, esFieldData);
+                    if (!result) {
+                        logger.error(
+                            "Main table insert to es index by query sql error, destination:{}, table: {}, index: {}, id: {}",
+                            config.getDestination(),
+                            dml.getTable(),
+                            mapping.get_index(),
+                            idVal);
+                    }
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+            return 0;
+        });
+    }
+
+    private void mainTableDelete(ESSyncConfig config, Dml dml, Map<String, Object> data) {
+        ESMapping mapping = config.getEsMapping();
+        String sql = mapping.getSql();
+        String condition = ESSyncUtil.pkConditionSql(mapping, data);
+        sql = ESSyncUtil.appendCondition(sql, condition);
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+        if (logger.isTraceEnabled()) {
+            logger.trace("Main table delete es index by query sql, destination:{}, table: {}, index: {}, sql: {}",
+                config.getDestination(),
+                dml.getTable(),
+                mapping.get_index(),
+                sql.replace("\n", " "));
+        }
+        ESSyncUtil.sqlRS(ds, sql, rs -> {
+            try {
+                while (rs.next()) {
+                    Object idVal = esTemplate.getIdValFromRS(mapping, rs);
+
+                    if (logger.isTraceEnabled()) {
+                        logger.trace(
+                            "Main table delete ot es index by query sql, destination:{}, table: {}, index: {}, id: {}",
+                            config.getDestination(),
+                            dml.getTable(),
+                            mapping.get_index(),
+                            idVal);
+                    }
+                    boolean result = esTemplate.delete(mapping, idVal);
+                    if (!result) {
+                        logger.error(
+                            "Main table delete to es index by query sql error, destination:{}, table: {}, index: {}, id: {}",
+                            config.getDestination(),
+                            dml.getTable(),
+                            mapping.get_index(),
+                            idVal);
+                    }
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+            return 0;
+        });
+    }
+
+    /**
+     * 关联表主表简单字段operation
+     *
+     * @param config es配置
+     * @param dml dml信息
+     * @param data 单行dml数据
+     * @param tableItem 当前表配置
+     */
+    private void joinTableSimpleFieldOperation(ESSyncConfig config, Dml dml, Map<String, Object> data,
+                                               TableItem tableItem, Map<String, Object> esFieldData) {
+        ESMapping mapping = config.getEsMapping();
+
+        Map<String, Object> paramsTmp = new LinkedHashMap<>();
+        for (Map.Entry<FieldItem, List<FieldItem>> entry : tableItem.getRelationTableFields().entrySet()) {
+            for (FieldItem fieldItem : entry.getValue()) {
+                if (fieldItem.getColumnItems().size() == 1) {
+                    Object value = esTemplate.getValFromData(mapping,
+                        data,
+                        fieldItem.getFieldName(),
+                        entry.getKey().getColumn().getColumnName());
+
+                    String fieldName = fieldItem.getFieldName();
+                    // 判断是否是主键
+                    if (fieldName.equals(mapping.get_id())) {
+                        fieldName = "_id";
+                    }
+                    paramsTmp.put(fieldName, value);
+                }
+            }
+        }
+
+        if (logger.isDebugEnabled()) {
+            logger.trace("Join table update es index by foreign key, destination:{}, table: {}, index: {}",
+                config.getDestination(),
+                dml.getTable(),
+                mapping.get_index());
+        }
+        boolean result = esTemplate.updateByQuery(config, paramsTmp, esFieldData);
+        if (!result) {
+            logger.error("Join table update es index by foreign key error, destination:{}, table: {}, index: {}",
+                config.getDestination(),
+                dml.getTable(),
+                mapping.get_index());
+        }
+    }
+
+    /**
+     * 关联子查询, 主表简单字段operation
+     *
+     * @param config es配置
+     * @param dml dml信息
+     * @param data 单行dml数据
+     * @param old 单行old数据
+     * @param tableItem 当前表配置
+     */
+    private void subTableSimpleFieldOperation(ESSyncConfig config, Dml dml, Map<String, Object> data,
+                                              Map<String, Object> old, TableItem tableItem) {
+        ESMapping mapping = config.getEsMapping();
+        StringBuilder sql = new StringBuilder(
+            "SELECT * FROM (" + tableItem.getSubQuerySql() + ") " + tableItem.getAlias() + " WHERE ");
+
+        for (FieldItem fkFieldItem : tableItem.getRelationTableFields().keySet()) {
+            String columnName = fkFieldItem.getColumn().getColumnName();
+            Object value = esTemplate.getValFromData(mapping, data, fkFieldItem.getFieldName(), columnName);
+            ESSyncUtil.appendCondition(sql, value, tableItem.getAlias(), columnName);
+        }
+        int len = sql.length();
+        sql.delete(len - 5, len);
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+        if (logger.isTraceEnabled()) {
+            logger.trace("Join table update es index by query sql, destination:{}, table: {}, index: {}, sql: {}",
+                config.getDestination(),
+                dml.getTable(),
+                mapping.get_index(),
+                sql.toString().replace("\n", " "));
+        }
+        ESSyncUtil.sqlRS(ds, sql.toString(), rs -> {
+            try {
+                while (rs.next()) {
+                    Map<String, Object> esFieldData = new LinkedHashMap<>();
+
+                    for (FieldItem fieldItem : tableItem.getRelationSelectFieldItems()) {
+                        if (old != null) {
+                            out: for (FieldItem fieldItem1 : tableItem.getSubQueryFields()) {
+                                for (ColumnItem columnItem0 : fieldItem.getColumnItems()) {
+                                    if (fieldItem1.getFieldName().equals(columnItem0.getColumnName()))
+                                        for (ColumnItem columnItem : fieldItem1.getColumnItems()) {
+                                            if (old.containsKey(columnItem.getColumnName())) {
+                                                Object val = esTemplate.getValFromRS(mapping,
+                                                    rs,
+                                                    fieldItem.getFieldName(),
+                                                    fieldItem.getColumn().getColumnName());
+                                                esFieldData.put(fieldItem.getFieldName(), val);
+                                                break out;
+                                            }
+                                        }
+                                }
+                            }
+                        } else {
+                            Object val = esTemplate.getValFromRS(mapping,
+                                rs,
+                                fieldItem.getFieldName(),
+                                fieldItem.getColumn().getColumnName());
+                            esFieldData.put(fieldItem.getFieldName(), val);
+                        }
+                    }
+
+                    Map<String, Object> paramsTmp = new LinkedHashMap<>();
+                    for (Map.Entry<FieldItem, List<FieldItem>> entry : tableItem.getRelationTableFields().entrySet()) {
+                        for (FieldItem fieldItem : entry.getValue()) {
+                            if (fieldItem.getColumnItems().size() == 1) {
+                                Object value = esTemplate.getValFromRS(mapping,
+                                    rs,
+                                    fieldItem.getFieldName(),
+                                    entry.getKey().getColumn().getColumnName());
+                                String fieldName = fieldItem.getFieldName();
+                                // 判断是否是主键
+                                if (fieldName.equals(mapping.get_id())) {
+                                    fieldName = "_id";
+                                }
+                                paramsTmp.put(fieldName, value);
+                            }
+                        }
+                    }
+
+                    if (logger.isDebugEnabled()) {
+                        logger.trace("Join table update es index by query sql, destination:{}, table: {}, index: {}",
+                            config.getDestination(),
+                            dml.getTable(),
+                            mapping.get_index());
+                    }
+                    boolean result = esTemplate.updateByQuery(config, paramsTmp, esFieldData);
+                    if (!result) {
+                        logger.error(
+                            "Join table update es index by query sql error, destination:{}, table: {}, index: {}",
+                            config.getDestination(),
+                            dml.getTable(),
+                            mapping.get_index());
+                    }
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+            return 0;
+        });
+    }
+
+    /**
+     * 关联(子查询), 主表复杂字段operation, 全sql执行
+     *
+     * @param config es配置
+     * @param dml dml信息
+     * @param data 单行dml数据
+     * @param tableItem 当前表配置
+     */
+    private void wholeSqlOperation(ESSyncConfig config, Dml dml, Map<String, Object> data, Map<String, Object> old,
+                                   TableItem tableItem) {
+        ESMapping mapping = config.getEsMapping();
+        StringBuilder sql = new StringBuilder(mapping.getSql() + " WHERE ");
+
+        for (FieldItem fkFieldItem : tableItem.getRelationTableFields().keySet()) {
+            String columnName = fkFieldItem.getColumn().getColumnName();
+            Object value = esTemplate.getValFromData(mapping, data, fkFieldItem.getFieldName(), columnName);
+            ESSyncUtil.appendCondition(sql, value, tableItem.getAlias(), columnName);
+        }
+        int len = sql.length();
+        sql.delete(len - 5, len);
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+        if (logger.isTraceEnabled()) {
+            logger.trace("Join table update es index by query whole sql, destination:{}, table: {}, index: {}, sql: {}",
+                config.getDestination(),
+                dml.getTable(),
+                mapping.get_index(),
+                sql.toString().replace("\n", " "));
+        }
+        ESSyncUtil.sqlRS(ds, sql.toString(), rs -> {
+            try {
+                while (rs.next()) {
+                    Map<String, Object> esFieldData = new LinkedHashMap<>();
+                    for (FieldItem fieldItem : tableItem.getRelationSelectFieldItems()) {
+                        if (old != null) {
+                            // 从表子查询
+                            out: for (FieldItem fieldItem1 : tableItem.getSubQueryFields()) {
+                                for (ColumnItem columnItem0 : fieldItem.getColumnItems()) {
+                                    if (fieldItem1.getFieldName().equals(columnItem0.getColumnName()))
+                                        for (ColumnItem columnItem : fieldItem1.getColumnItems()) {
+                                            if (old.containsKey(columnItem.getColumnName())) {
+                                                Object val = esTemplate.getValFromRS(mapping,
+                                                    rs,
+                                                    fieldItem.getFieldName(),
+                                                    fieldItem.getFieldName());
+                                                esFieldData.put(fieldItem.getFieldName(), val);
+                                                break out;
+                                            }
+                                        }
+                                }
+                            }
+                            // 从表非子查询
+                            for (FieldItem fieldItem1 : tableItem.getRelationSelectFieldItems()) {
+                                if (fieldItem1.equals(fieldItem)) {
+                                    for (ColumnItem columnItem : fieldItem1.getColumnItems()) {
+                                        if (old.containsKey(columnItem.getColumnName())) {
+                                            Object val = esTemplate.getValFromRS(mapping,
+                                                rs,
+                                                fieldItem.getFieldName(),
+                                                fieldItem.getFieldName());
+                                            esFieldData.put(fieldItem.getFieldName(), val);
+                                            break;
+                                        }
+                                    }
+                                }
+                            }
+                        } else {
+                            Object val = esTemplate
+                                .getValFromRS(mapping, rs, fieldItem.getFieldName(), fieldItem.getFieldName());
+                            esFieldData.put(fieldItem.getFieldName(), val);
+                        }
+                    }
+
+                    Map<String, Object> paramsTmp = new LinkedHashMap<>();
+                    for (Map.Entry<FieldItem, List<FieldItem>> entry : tableItem.getRelationTableFields().entrySet()) {
+                        for (FieldItem fieldItem : entry.getValue()) {
+                            Object value = esTemplate
+                                .getValFromRS(mapping, rs, fieldItem.getFieldName(), fieldItem.getFieldName());
+                            String fieldName = fieldItem.getFieldName();
+                            // 判断是否是主键
+                            if (fieldName.equals(mapping.get_id())) {
+                                fieldName = "_id";
+                            }
+                            paramsTmp.put(fieldName, value);
+                        }
+                    }
+
+                    if (logger.isDebugEnabled()) {
+                        logger.trace(
+                            "Join table update es index by query whole sql, destination:{}, table: {}, index: {}",
+                            config.getDestination(),
+                            dml.getTable(),
+                            mapping.get_index());
+                    }
+                    boolean result = esTemplate.updateByQuery(config, paramsTmp, esFieldData);
+                    if (!result) {
+                        logger.error(
+                            "Join table update es index by query whole sql error, destination:{}, table: {}, index: {}",
+                            config.getDestination(),
+                            dml.getTable(),
+                            mapping.get_index());
+                    }
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+            return 0;
+        });
+    }
+
+    /**
+     * 单表简单字段update
+     *
+     * @param config es配置
+     * @param dml dml信息
+     * @param data 单行data数据
+     * @param old 单行old数据
+     */
+    private void singleTableSimpleFiledUpdate(ESSyncConfig config, Dml dml, Map<String, Object> data,
+                                              Map<String, Object> old) {
+        ESMapping mapping = config.getEsMapping();
+        Map<String, Object> esFieldData = new LinkedHashMap<>();
+
+        Object idVal = esTemplate.getESDataFromDmlData(mapping, data, old, esFieldData);
+
+        if (logger.isTraceEnabled()) {
+            logger.trace("Main table update ot es index, destination:{}, table: {}, index: {}, id: {}",
+                config.getDestination(),
+                dml.getTable(),
+                mapping.get_index(),
+                idVal);
+        }
+        boolean result = esTemplate.update(mapping, idVal, esFieldData);
+        if (!result) {
+            logger.error("Main table update to es index error, destination:{}, table: {}, index: {}, id: {}",
+                config.getDestination(),
+                dml.getTable(),
+                mapping.get_index(),
+                idVal);
+        }
+    }
+
+    /**
+     * 主表(单表)复杂字段update
+     *
+     * @param config es配置
+     * @param dml dml信息
+     * @param data 单行dml数据
+     */
+    private void mainTableUpdate(ESSyncConfig config, Dml dml, Map<String, Object> data, Map<String, Object> old) {
+        ESMapping mapping = config.getEsMapping();
+        String sql = mapping.getSql();
+        String condition = ESSyncUtil.pkConditionSql(mapping, data);
+        sql = ESSyncUtil.appendCondition(sql, condition);
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+        if (logger.isTraceEnabled()) {
+            logger.trace("Main table update ot es index by query sql, destination:{}, table: {}, index: {}, sql: {}",
+                config.getDestination(),
+                dml.getTable(),
+                mapping.get_index(),
+                sql.replace("\n", " "));
+        }
+        ESSyncUtil.sqlRS(ds, sql, rs -> {
+            try {
+                while (rs.next()) {
+                    Map<String, Object> esFieldData = new LinkedHashMap<>();
+                    Object idVal = esTemplate.getESDataFromRS(mapping, rs, old, esFieldData);
+
+                    if (logger.isTraceEnabled()) {
+                        logger.trace(
+                            "Main table update ot es index by query sql, destination:{}, table: {}, index: {}, id: {}",
+                            config.getDestination(),
+                            dml.getTable(),
+                            mapping.get_index(),
+                            idVal);
+                    }
+                    boolean result = esTemplate.update(mapping, idVal, esFieldData);
+                    if (!result) {
+                        logger.error(
+                            "Main table update to es index by query sql error, destination:{}, table: {}, index: {}, id: {}",
+                            config.getDestination(),
+                            dml.getTable(),
+                            mapping.get_index(),
+                            idVal);
+                    }
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+            return 0;
+        });
+    }
+}

+ 335 - 0
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESSyncUtil.java

@@ -0,0 +1,335 @@
+package com.alibaba.otter.canal.client.adapter.es.support;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.sql.*;
+import java.util.*;
+import java.util.Date;
+import java.util.function.Function;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.codec.binary.Base64;
+import org.joda.time.DateTime;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.ColumnItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.TableItem;
+
+/**
+ * ES 同步工具同类
+ *
+ * @author rewerma 2018-11-01
+ * @version 1.0.0
+ */
+public class ESSyncUtil {
+
+    private static Logger logger = LoggerFactory.getLogger(ESSyncUtil.class);
+
+    public static Object convertToEsObj(Object val, String fieldInfo) {
+        if (val == null) {
+            return null;
+        }
+        if (fieldInfo.startsWith("array:")) {
+            String separator = fieldInfo.substring("array:".length()).trim();
+            String[] values = val.toString().split(separator);
+            return Arrays.asList(values);
+        } else if (fieldInfo.startsWith("object")) {
+            return JSON.parse(val.toString());
+        }
+        return null;
+    }
+
+    /**
+     * 类型转换为Mapping中对应的类型
+     */
+    public static Object typeConvert(Object val, String esType) {
+        if (val == null) {
+            return null;
+        }
+        if (esType == null) {
+            return val;
+        }
+        Object res = null;
+        if ("integer".equals(esType)) {
+            if (val instanceof Number) {
+                res = ((Number) val).intValue();
+            } else {
+                res = Integer.parseInt(val.toString());
+            }
+        } else if ("long".equals(esType)) {
+            if (val instanceof Number) {
+                res = ((Number) val).longValue();
+            } else {
+                res = Long.parseLong(val.toString());
+            }
+        } else if ("short".equals(esType)) {
+            if (val instanceof Number) {
+                res = ((Number) val).shortValue();
+            } else {
+                res = Short.parseShort(val.toString());
+            }
+        } else if ("byte".equals(esType)) {
+            if (val instanceof Number) {
+                res = ((Number) val).byteValue();
+            } else {
+                res = Byte.parseByte(val.toString());
+            }
+        } else if ("double".equals(esType)) {
+            if (val instanceof Number) {
+                res = ((Number) val).doubleValue();
+            } else {
+                res = Double.parseDouble(val.toString());
+            }
+        } else if ("float".equals(esType) || "half_float".equals(esType) || "scaled_float".equals(esType)) {
+            if (val instanceof Number) {
+                res = ((Number) val).floatValue();
+            } else {
+                res = Float.parseFloat(val.toString());
+            }
+        } else if ("boolean".equals(esType)) {
+            if (val instanceof Boolean) {
+                res = val;
+            } else if (val instanceof Number) {
+                int v = ((Number) val).intValue();
+                res = v != 0;
+            } else {
+                res = Boolean.parseBoolean(val.toString());
+            }
+        } else if ("date".equals(esType)) {
+            if (val instanceof java.sql.Time) {
+                DateTime dateTime = new DateTime(((java.sql.Time) val).getTime());
+                if (dateTime.getMillisOfSecond() != 0) {
+                    res = dateTime.toString("HH:mm:ss.SSS");
+                } else {
+                    res = dateTime.toString("HH:mm:ss");
+                }
+            } else if (val instanceof java.sql.Timestamp) {
+                DateTime dateTime = new DateTime(((java.sql.Timestamp) val).getTime());
+                if (dateTime.getMillisOfSecond() != 0) {
+                    res = dateTime.toString("yyyy-MM-dd'T'HH:mm:ss.SSS+08:00");
+                } else {
+                    res = dateTime.toString("yyyy-MM-dd'T'HH:mm:ss+08:00");
+                }
+            } else if (val instanceof java.sql.Date || val instanceof Date) {
+                DateTime dateTime;
+                if (val instanceof java.sql.Date) {
+                    dateTime = new DateTime(((java.sql.Date) val).getTime());
+                } else {
+                    dateTime = new DateTime(((Date) val).getTime());
+                }
+                if (dateTime.getHourOfDay() == 0 && dateTime.getMinuteOfHour() == 0 && dateTime.getSecondOfMinute() == 0
+                    && dateTime.getMillisOfSecond() == 0) {
+                    res = dateTime.toString("yyyy-MM-dd");
+                } else {
+                    if (dateTime.getMillisOfSecond() != 0) {
+                        res = dateTime.toString("yyyy-MM-dd'T'HH:mm:ss.SSS+08:00");
+                    } else {
+                        res = dateTime.toString("yyyy-MM-dd'T'HH:mm:ss+08:00");
+                    }
+                }
+            } else if (val instanceof Long) {
+                DateTime dateTime = new DateTime(((Long) val).longValue());
+                if (dateTime.getHourOfDay() == 0 && dateTime.getMinuteOfHour() == 0 && dateTime.getSecondOfMinute() == 0
+                    && dateTime.getMillisOfSecond() == 0) {
+                    res = dateTime.toString("yyyy-MM-dd");
+                } else if (dateTime.getMillisOfSecond() != 0) {
+                    res = dateTime.toString("yyyy-MM-dd'T'HH:mm:ss.SSS+08:00");
+                } else {
+                    res = dateTime.toString("yyyy-MM-dd'T'HH:mm:ss+08:00");
+                }
+            } else if (val instanceof String) {
+                String v = ((String) val).trim();
+                if (v.length() > 18 && v.charAt(4) == '-' && v.charAt(7) == '-' && v.charAt(10) == ' '
+                    && v.charAt(13) == ':' && v.charAt(16) == ':') {
+                    String dt = v.substring(0, 10) + "T" + v.substring(11);
+                    DateTime dateTime = new DateTime(dt);
+                    if (dateTime.getMillisOfSecond() != 0) {
+                        res = dateTime.toString("yyyy-MM-dd'T'HH:mm:ss.SSS+08:00");
+                    } else {
+                        res = dateTime.toString("yyyy-MM-dd'T'HH:mm:ss+08:00");
+                    }
+                } else if (v.length() == 10 && v.charAt(4) == '-' && v.charAt(7) == '-') {
+                    DateTime dateTime = new DateTime(v);
+                    res = dateTime.toString("yyyy-MM-dd");
+                }
+            }
+        } else if ("binary".equals(esType)) {
+            if (val instanceof byte[]) {
+                Base64 base64 = new Base64();
+                res = base64.encodeAsString((byte[]) val);
+            } else if (val instanceof Blob) {
+                byte[] b = blobToBytes((Blob) val);
+                Base64 base64 = new Base64();
+                res = base64.encodeAsString(b);
+            } else if (val instanceof String) {
+                // 对应canal中的单字节编码
+                byte[] b = ((String) val).getBytes(StandardCharsets.ISO_8859_1);
+                Base64 base64 = new Base64();
+                res = base64.encodeAsString(b);
+            }
+        } else if ("geo_point".equals(esType)) {
+            if (!(val instanceof String)) {
+                logger.error("es type is geo_point, but source type is not String");
+                return val;
+            }
+
+            if (!((String) val).contains(",")) {
+                logger.error("es type is geo_point, source value not contains ',' separator");
+                return val;
+            }
+
+            String[] point = ((String) val).split(",");
+            Map<String, Double> location = new HashMap<>();
+            location.put("lat", Double.valueOf(point[0].trim()));
+            location.put("lon", Double.valueOf(point[1].trim()));
+            return location;
+        } else if ("array".equals(esType)) {
+            if ("".equals(val.toString().trim())) {
+                res = new ArrayList<>();
+            } else {
+                String value = val.toString();
+                String separator = ",";
+                if (!value.contains(",")) {
+                    if (value.contains(";")) {
+                        separator = ";";
+                    } else if (value.contains("|")) {
+                        separator = "|";
+                    } else if (value.contains("-")) {
+                        separator = "-";
+                    }
+                }
+                String[] values = value.split(separator);
+                return Arrays.asList(values);
+            }
+        } else if ("object".equals(esType)) {
+            if ("".equals(val.toString().trim())) {
+                res = new HashMap<>();
+            } else {
+                res = JSON.parseObject(val.toString(), Map.class);
+            }
+        } else {
+            // 其他类全以字符串处理
+            res = val.toString();
+        }
+
+        return res;
+    }
+
+    /**
+     * Blob转byte[]
+     */
+    private static byte[] blobToBytes(Blob blob) {
+        try (InputStream is = blob.getBinaryStream()) {
+            byte[] b = new byte[(int) blob.length()];
+            is.read(b);
+            return b;
+        } catch (IOException | SQLException e) {
+            logger.error(e.getMessage());
+            return null;
+        }
+    }
+
+    /**
+     * 拼接主键条件
+     * 
+     * @param mapping
+     * @param data
+     * @return
+     */
+    public static String pkConditionSql(ESMapping mapping, Map<String, Object> data) {
+        Set<ColumnItem> idColumns = new LinkedHashSet<>();
+        SchemaItem schemaItem = mapping.getSchemaItem();
+
+        TableItem mainTable = schemaItem.getMainTable();
+
+        for (ColumnItem idColumnItem : schemaItem.getIdFieldItem(mapping).getColumnItems()) {
+            if ((mainTable.getAlias() == null && idColumnItem.getOwner() == null)
+                || (mainTable.getAlias() != null && mainTable.getAlias().equals(idColumnItem.getOwner()))) {
+                idColumns.add(idColumnItem);
+            }
+        }
+
+        if (idColumns.isEmpty()) {
+            throw new RuntimeException("Not found primary key field in main table");
+        }
+
+        // 拼接condition
+        StringBuilder condition = new StringBuilder(" ");
+        for (ColumnItem idColumn : idColumns) {
+            Object idVal = data.get(idColumn.getColumnName());
+            if (mainTable.getAlias() != null) condition.append(mainTable.getAlias()).append(".");
+            condition.append(idColumn.getColumnName()).append("=");
+            if (idVal instanceof String) {
+                condition.append("'").append(idVal).append("' AND ");
+            } else {
+                condition.append(idVal).append(" AND ");
+            }
+        }
+
+        if (condition.toString().endsWith("AND ")) {
+            int len2 = condition.length();
+            condition.delete(len2 - 4, len2);
+        }
+        return condition.toString();
+    }
+
+    public static String appendCondition(String sql, String condition) {
+        return sql + " WHERE " + condition + " ";
+    }
+
+    public static void appendCondition(StringBuilder sql, Object value, String owner, String columnName) {
+        if (value instanceof String) {
+            sql.append(owner).append(".").append(columnName).append("='").append(value).append("'  AND ");
+        } else {
+            sql.append(owner).append(".").append(columnName).append("=").append(value).append("  AND ");
+        }
+    }
+
+    /**
+     * 执行查询sql
+     */
+    public static Object sqlRS(DataSource ds, String sql, Function<ResultSet, Object> fun) {
+        Connection conn = null;
+        Statement smt = null;
+        ResultSet rs = null;
+        try {
+            conn = ds.getConnection();
+            smt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+            smt.setFetchSize(Integer.MIN_VALUE);
+            rs = smt.executeQuery(sql);
+
+            return fun.apply(rs);
+        } catch (SQLException e) {
+            logger.error("sqlRs has error, sql: {} ", sql);
+            throw new RuntimeException(e);
+        } finally {
+            if (rs != null) {
+                try {
+                    rs.close();
+                } catch (SQLException e) {
+                    logger.error("error to close result set");
+                }
+            }
+            if (smt != null) {
+                try {
+                    smt.close();
+                } catch (SQLException e) {
+                    logger.error("error to close statement");
+                }
+            }
+            if (conn != null) {
+                try {
+                    conn.close();
+                } catch (SQLException e) {
+                    logger.error("error to close db connection");
+                }
+            }
+        }
+    }
+}

+ 526 - 0
client-adapter/elasticsearch/src/main/java/com/alibaba/otter/canal/client/adapter/es/support/ESTemplate.java

@@ -0,0 +1,526 @@
+package com.alibaba.otter.canal.client.adapter.es.support;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.TimeUnit;
+
+import javax.sql.DataSource;
+
+import com.alibaba.fastjson.JSON;
+import org.elasticsearch.action.bulk.BulkItemResponse;
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.transport.TransportClient;
+import org.elasticsearch.cluster.metadata.MappingMetaData;
+import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.index.query.BoolQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.index.reindex.BulkByScrollResponse;
+import org.elasticsearch.index.reindex.UpdateByQueryAction;
+import org.elasticsearch.index.reindex.UpdateByQueryRequestBuilder;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptType;
+import org.elasticsearch.search.SearchHit;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.util.CollectionUtils;
+
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig.ESMapping;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.ColumnItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.FieldItem;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+
+/**
+ * ES 操作模板
+ *
+ * @author rewerma 2018-11-01
+ * @version 1.0.0
+ */
+public class ESTemplate {
+
    private static final Logger logger         = LoggerFactory.getLogger(ESTemplate.class);

    // Page size for pk-term search fallbacks and threshold for switching
    // update-by-query to a source-DB re-query.
    private static final int    MAX_BATCH_SIZE = 1000;

    // Connected ES transport client; all index/update/delete calls go through it.
    private TransportClient     transportClient;

    /**
     * Create a template bound to the given ES transport client.
     *
     * @param transportClient connected ES transport client
     */
    public ESTemplate(TransportClient transportClient){
        this.transportClient = transportClient;
    }
+
+    /**
+     * 插入数据
+     * 
+     * @param mapping
+     * @param pkVal
+     * @param esFieldData
+     * @return
+     */
+    public boolean insert(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
+        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+        if (mapping.get_id() != null) {
+            bulkRequestBuilder
+                .add(transportClient.prepareIndex(mapping.get_index(), mapping.get_type(), pkVal.toString())
+                    .setSource(esFieldData));
+        } else {
+            SearchResponse response = transportClient.prepareSearch(mapping.get_index())
+                .setTypes(mapping.get_type())
+                .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
+                .setSize(MAX_BATCH_SIZE)
+                .get();
+            for (SearchHit hit : response.getHits()) {
+                bulkRequestBuilder
+                    .add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), hit.getId()));
+            }
+            bulkRequestBuilder
+                .add(transportClient.prepareIndex(mapping.get_index(), mapping.get_type()).setSource(esFieldData));
+        }
+        return commitBulkRequest(bulkRequestBuilder);
+    }
+
+    /**
+     * 根据主键更新数据
+     * 
+     * @param mapping
+     * @param pkVal
+     * @param esFieldData
+     * @return
+     */
+    public boolean update(ESMapping mapping, Object pkVal, Map<String, Object> esFieldData) {
+        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+        append4Update(bulkRequestBuilder, mapping, pkVal, esFieldData);
+        return commitBulkRequest(bulkRequestBuilder);
+    }
+
+    public void append4Update(BulkRequestBuilder bulkRequestBuilder, ESMapping mapping, Object pkVal,
+                              Map<String, Object> esFieldData) {
+        if (mapping.get_id() != null) {
+            bulkRequestBuilder
+                .add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
+                    .setDoc(esFieldData));
+        } else {
+            SearchResponse response = transportClient.prepareSearch(mapping.get_index())
+                .setTypes(mapping.get_type())
+                .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
+                .setSize(MAX_BATCH_SIZE)
+                .get();
+            for (SearchHit hit : response.getHits()) {
+                bulkRequestBuilder
+                    .add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), hit.getId())
+                        .setDoc(esFieldData));
+            }
+        }
+    }
+
+    /**
+     * update by query
+     *
+     * @param config
+     * @param paramsTmp
+     * @param esFieldData
+     * @return
+     */
+    public boolean updateByQuery(ESSyncConfig config, Map<String, Object> paramsTmp, Map<String, Object> esFieldData) {
+        if (paramsTmp.isEmpty()) {
+            return false;
+        }
+        ESMapping mapping = config.getEsMapping();
+        BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();
+        paramsTmp.forEach((fieldName, value) -> queryBuilder.must(QueryBuilders.termsQuery(fieldName, value)));
+
+        SearchResponse response = transportClient.prepareSearch(mapping.get_index())
+            .setTypes(mapping.get_type())
+            .setSize(0)
+            .setQuery(queryBuilder)
+            .get();
+        long count = response.getHits().getTotalHits();
+        // 如果更新量大于Max, 查询sql批量更新
+        if (count > MAX_BATCH_SIZE) {
+            BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+
+            DataSource ds = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+            // 查询sql更新
+            StringBuilder sql = new StringBuilder("SELECT * FROM (" + mapping.getSql() + ") _v WHERE ");
+            paramsTmp.forEach(
+                (fieldName, value) -> sql.append("_v.").append(fieldName).append("=").append(value).append(" AND "));
+            int len = sql.length();
+            sql.delete(len - 4, len);
+            ESSyncUtil.sqlRS(ds, sql.toString(), rs -> {
+                int exeCount = 1;
+                try {
+                    BulkRequestBuilder bulkRequestBuilderTmp = bulkRequestBuilder;
+                    while (rs.next()) {
+                        Object idVal = getIdValFromRS(mapping, rs);
+                        append4Update(bulkRequestBuilderTmp, mapping, idVal, esFieldData);
+
+                        if (exeCount % mapping.getCommitBatch() == 0 && bulkRequestBuilderTmp.numberOfActions() > 0) {
+                            commitBulkRequest(bulkRequestBuilderTmp);
+                            bulkRequestBuilderTmp = transportClient.prepareBulk();
+                        }
+                        exeCount++;
+                    }
+
+                    if (bulkRequestBuilder.numberOfActions() > 0) {
+                        commitBulkRequest(bulkRequestBuilderTmp);
+                    }
+                } catch (Exception e) {
+                    throw new RuntimeException(e);
+                }
+                return 0;
+            });
+            return true;
+        } else {
+            return updateByQuery(mapping, queryBuilder, esFieldData, 1);
+        }
+    }
+
+    private boolean updateByQuery(ESMapping mapping, QueryBuilder queryBuilder, Map<String, Object> esFieldData,
+                                  int counter) {
+        if (CollectionUtils.isEmpty(esFieldData)) {
+            return true;
+        }
+
+        StringBuilder sb = new StringBuilder();
+        esFieldData.forEach((key, value) -> {
+            if (value instanceof Map) {
+                HashMap mapValue = (HashMap) value;
+                if (mapValue.containsKey("lon") && mapValue.containsKey("lat") && mapValue.size() == 2) {
+                    sb.append("ctx._source")
+                        .append("['")
+                        .append(key)
+                        .append("']")
+                        .append(" = [")
+                        .append(mapValue.get("lon"))
+                        .append(", ")
+                        .append(mapValue.get("lat"))
+                        .append("];");
+                } else {
+                    sb.append("ctx._source").append("[\"").append(key).append("\"]").append(" = ");
+                    sb.append(JSON.toJSONString(value));
+                    sb.append(";");
+                }
+            } else if (value instanceof List) {
+                sb.append("ctx._source").append("[\"").append(key).append("\"]").append(" = ");
+                sb.append(JSON.toJSONString(value));
+                sb.append(";");
+            } else if (value instanceof String) {
+                sb.append("ctx._source")
+                    .append("['")
+                    .append(key)
+                    .append("']")
+                    .append(" = '")
+                    .append(value)
+                    .append("';");
+            } else {
+                sb.append("ctx._source").append("['").append(key).append("']").append(" = ").append(value).append(";");
+            }
+        });
+        String scriptLine = sb.toString();
+        if (logger.isTraceEnabled()) {
+            logger.trace(scriptLine);
+        }
+
+        UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(transportClient);
+        updateByQuery.source(mapping.get_index())
+            .abortOnVersionConflict(false)
+            .filter(queryBuilder)
+            .script(new Script(ScriptType.INLINE, "painless", scriptLine, Collections.emptyMap()));
+
+        BulkByScrollResponse response = updateByQuery.get();
+        if (logger.isTraceEnabled()) {
+            logger.trace("updateByQuery response: {}", response.getStatus());
+        }
+        if (!CollectionUtils.isEmpty(response.getSearchFailures())) {
+            logger.error("script update_for_search has search error: " + response.getBulkFailures());
+            return false;
+        }
+
+        if (!CollectionUtils.isEmpty(response.getBulkFailures())) {
+            logger.error("script update_for_search has update error: " + response.getBulkFailures());
+            return false;
+        }
+
+        if (response.getStatus().getVersionConflicts() > 0) {
+            if (counter >= 3) {
+                logger.error("第 {} 次执行updateByQuery, 依旧存在分片版本冲突,不再继续重试。", counter);
+                return false;
+            }
+            logger.warn("本次updateByQuery存在分片版本冲突,准备重新执行...");
+            try {
+                TimeUnit.SECONDS.sleep(1);
+            } catch (InterruptedException e) {
+                // ignore
+            }
+            return updateByQuery(mapping, queryBuilder, esFieldData, ++counter);
+        }
+
+        return true;
+    }
+
+    /**
+     * 通过主键删除数据
+     *
+     * @param mapping
+     * @param pkVal
+     * @return
+     */
+    public boolean delete(ESMapping mapping, Object pkVal) {
+        BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
+        if (mapping.get_id() != null) {
+            bulkRequestBuilder
+                .add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), pkVal.toString()));
+        } else {
+            SearchResponse response = transportClient.prepareSearch(mapping.get_index())
+                .setTypes(mapping.get_type())
+                .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
+                .setSize(MAX_BATCH_SIZE)
+                .get();
+            for (SearchHit hit : response.getHits()) {
+                bulkRequestBuilder
+                    .add(transportClient.prepareDelete(mapping.get_index(), mapping.get_type(), hit.getId()));
+            }
+        }
+        return commitBulkRequest(bulkRequestBuilder);
+    }
+
+    /**
+     * 批量提交
+     *
+     * @param bulkRequestBuilder
+     * @return
+     */
+    private static boolean commitBulkRequest(BulkRequestBuilder bulkRequestBuilder) {
+        if (bulkRequestBuilder.numberOfActions() > 0) {
+            BulkResponse response = bulkRequestBuilder.execute().actionGet();
+            if (response.hasFailures()) {
+                for (BulkItemResponse itemResponse : response.getItems()) {
+                    if (!itemResponse.isFailed()) {
+                        continue;
+                    }
+
+                    if (itemResponse.getFailure().getStatus() == RestStatus.NOT_FOUND) {
+                        logger.warn(itemResponse.getFailureMessage());
+                    } else {
+                        logger.error("ES sync commit error: {}", itemResponse.getFailureMessage());
+                    }
+                }
+            }
+
+            return !response.hasFailures();
+        }
+        return true;
+    }
+
+    public Object getValFromRS(ESMapping mapping, ResultSet resultSet, String fieldName,
+                               String columnName) throws SQLException {
+        String esType = getEsType(mapping, fieldName);
+
+        Object value = resultSet.getObject(columnName);
+        if (value instanceof Boolean) {
+            if (!"boolean".equals(esType)) {
+                value = resultSet.getByte(columnName);
+            }
+        }
+
+        // 如果是对象类型
+        if (mapping.getObjFields().containsKey(fieldName)) {
+            return ESSyncUtil.convertToEsObj(value, mapping.getObjFields().get(fieldName));
+        } else {
+            return ESSyncUtil.typeConvert(value, esType);
+        }
+    }
+
+    public Object getESDataFromRS(ESMapping mapping, ResultSet resultSet,
+                                  Map<String, Object> esFieldData) throws SQLException {
+        SchemaItem schemaItem = mapping.getSchemaItem();
+        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
+        Object resultIdVal = null;
+        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
+            Object value = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName());
+
+            if (fieldItem.getFieldName().equals(idFieldName)) {
+                resultIdVal = value;
+            }
+
+            if (!fieldItem.getFieldName().equals(mapping.get_id())
+                && !mapping.getSkips().contains(fieldItem.getFieldName())) {
+                esFieldData.put(fieldItem.getFieldName(), value);
+            }
+        }
+        return resultIdVal;
+    }
+
+    public Object getIdValFromRS(ESMapping mapping, ResultSet resultSet) throws SQLException {
+        SchemaItem schemaItem = mapping.getSchemaItem();
+        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
+        Object resultIdVal = null;
+        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
+            Object value = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName());
+
+            if (fieldItem.getFieldName().equals(idFieldName)) {
+                resultIdVal = value;
+                break;
+            }
+        }
+        return resultIdVal;
+    }
+
+    public Object getESDataFromRS(ESMapping mapping, ResultSet resultSet, Map<String, Object> dmlOld,
+                                  Map<String, Object> esFieldData) throws SQLException {
+        SchemaItem schemaItem = mapping.getSchemaItem();
+        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
+        Object resultIdVal = null;
+        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
+            if (fieldItem.getFieldName().equals(idFieldName)) {
+                resultIdVal = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName());
+            }
+
+            for (ColumnItem columnItem : fieldItem.getColumnItems()) {
+                if (dmlOld.containsKey(columnItem.getColumnName())
+                    && !mapping.getSkips().contains(fieldItem.getFieldName())) {
+                    esFieldData.put(fieldItem.getFieldName(),
+                        getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName()));
+                    break;
+                }
+            }
+        }
+        return resultIdVal;
+    }
+
+    public Object getValFromData(ESMapping mapping, Map<String, Object> dmlData, String fieldName, String columnName) {
+        String esType = getEsType(mapping, fieldName);
+        Object value = dmlData.get(columnName);
+        if (value instanceof Byte) {
+            if ("boolean".equals(esType)) {
+                value = ((Byte) value).intValue() != 0;
+            }
+        }
+
+        // 如果是对象类型
+        if (mapping.getObjFields().containsKey(fieldName)) {
+            return ESSyncUtil.convertToEsObj(value, mapping.getObjFields().get(fieldName));
+        } else {
+            return ESSyncUtil.typeConvert(value, esType);
+        }
+    }
+
+    /**
+     * 将dml的data转换为es的data
+     *
+     * @param mapping 配置mapping
+     * @param dmlData dml data
+     * @param esFieldData es data
+     * @return 返回 id 值
+     */
+    public Object getESDataFromDmlData(ESMapping mapping, Map<String, Object> dmlData,
+                                       Map<String, Object> esFieldData) {
+        SchemaItem schemaItem = mapping.getSchemaItem();
+        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
+        Object resultIdVal = null;
+        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
+            String columnName = fieldItem.getColumnItems().iterator().next().getColumnName();
+            Object value = getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName);
+
+            if (fieldItem.getFieldName().equals(idFieldName)) {
+                resultIdVal = value;
+            }
+
+            if (!fieldItem.getFieldName().equals(mapping.get_id())
+                && !mapping.getSkips().contains(fieldItem.getFieldName())) {
+                esFieldData.put(fieldItem.getFieldName(), value);
+            }
+        }
+        return resultIdVal;
+    }
+
+    /**
+     * 将dml的data, old转换为es的data
+     *
+     * @param mapping 配置mapping
+     * @param dmlData dml data
+     * @param esFieldData es data
+     * @return 返回 id 值
+     */
+    public Object getESDataFromDmlData(ESMapping mapping, Map<String, Object> dmlData, Map<String, Object> dmlOld,
+                                       Map<String, Object> esFieldData) {
+        SchemaItem schemaItem = mapping.getSchemaItem();
+        String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id();
+        Object resultIdVal = null;
+        for (FieldItem fieldItem : schemaItem.getSelectFields().values()) {
+            String columnName = fieldItem.getColumnItems().iterator().next().getColumnName();
+
+            if (fieldItem.getFieldName().equals(idFieldName)) {
+                resultIdVal = getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName);
+            }
+
+            if (dmlOld.get(columnName) != null && !mapping.getSkips().contains(fieldItem.getFieldName())) {
+                esFieldData.put(fieldItem.getFieldName(),
+                    getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName));
+            }
+        }
+        return resultIdVal;
+    }
+
    /**
     * Local cache of ES field types, keyed by "index-type".
     */
    private static ConcurrentMap<String, Map<String, String>> esFieldTypes = new ConcurrentHashMap<>();

    /**
     * Resolve the ES type of a mapping property, fetching the index mapping from
     * cluster state and caching it on first use.
     *
     * @param mapping mapping config
     * @param fieldName property name
     * @return the ES type name, "object" for nested properties, or null when the
     *         property does not exist in the index mapping
     */
    @SuppressWarnings("unchecked")
    private String getEsType(ESMapping mapping, String fieldName) {
        String key = mapping.get_index() + "-" + mapping.get_type();
        Map<String, String> fieldType = esFieldTypes.get(key);
        if (fieldType == null) {
            ImmutableOpenMap<String, MappingMetaData> mappings;
            try {
                mappings = transportClient.admin()
                    .cluster()
                    .prepareState()
                    .execute()
                    .actionGet()
                    .getState()
                    .getMetaData()
                    .getIndices()
                    .get(mapping.get_index())
                    .getMappings();
            } catch (NullPointerException e) {
                // NOTE(review): the NPE signals a missing index in cluster state
                // (getIndices().get(...) returned null); an explicit null check on
                // the index metadata would be clearer than catching NPE.
                throw new IllegalArgumentException("Not found the mapping info of index: " + mapping.get_index());
            }
            MappingMetaData mappingMetaData = mappings.get(mapping.get_type());
            if (mappingMetaData == null) {
                throw new IllegalArgumentException("Not found the mapping info of index: " + mapping.get_index());
            }

            fieldType = new LinkedHashMap<>();

            Map<String, Object> sourceMap = mappingMetaData.getSourceAsMap();
            Map<String, Object> esMapping = (Map<String, Object>) sourceMap.get("properties");
            for (Map.Entry<String, Object> entry : esMapping.entrySet()) {
                Map<String, Object> value = (Map<String, Object>) entry.getValue();
                // A property that itself has "properties" is a nested object field
                if (value.containsKey("properties")) {
                    fieldType.put(entry.getKey(), "object");
                } else {
                    fieldType.put(entry.getKey(), (String) value.get("type"));
                }
            }
            // NOTE(review): plain put() may race with a concurrent load of the same
            // index mapping; harmless because both computations produce the same map.
            esFieldTypes.put(key, fieldType);
        }

        return fieldType.get(fieldName);
    }
+}

+ 1 - 0
client-adapter/elasticsearch/src/main/resources/META-INF/canal/com.alibaba.otter.canal.client.adapter.OuterAdapter

@@ -0,0 +1 @@
+es=com.alibaba.otter.canal.client.adapter.es.ESAdapter

+ 16 - 0
client-adapter/elasticsearch/src/main/resources/es/mytest_user.yml

@@ -0,0 +1,16 @@
+dataSourceKey: defaultDS
+destination: example
+esMapping:
+  _index: mytest_user
+  _type: _doc
+  _id: _id
+#  pk: id
+  sql: "select a.id as _id, a.name as _name, a.role_id as _role_id, b.role_name as _role_name,
+        a.c_time as _c_time, c.labels as _labels from user a
+        left join role b on b.id=a.role_id
+        left join (select user_id, group_concat(label order by id desc separator ';') as labels from label
+        group by user_id) c on c.user_id=a.id"
+#  objFields:
+#    _labels: array:;
+  etlCondition: "where a.c_time>='{0}'"
+  commitBatch: 3000

+ 38 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/ConfigLoadTest.java

@@ -0,0 +1,38 @@
+package com.alibaba.otter.canal.client.adapter.es.test;
+
+import java.util.Map;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfigLoader;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+
+public class ConfigLoadTest {
+
+    @Before
+    public void before() {
+        // AdapterConfigs.put("es", "mytest_user.yml");
+        // 加载数据源连接池
+        DatasourceConfig.DATA_SOURCES.put("defaultDS", TestConstant.dataSource);
+    }
+
+    @Test
+    public void testLoad() {
+        Map<String, ESSyncConfig> configMap = ESSyncConfigLoader.load();
+        ESSyncConfig config = configMap.get("mytest_user.yml");
+        Assert.assertNotNull(config);
+        Assert.assertEquals("defaultDS", config.getDataSourceKey());
+        ESSyncConfig.ESMapping esMapping = config.getEsMapping();
+        Assert.assertEquals("mytest_user", esMapping.get_index());
+        Assert.assertEquals("_doc", esMapping.get_type());
+        Assert.assertEquals("id", esMapping.get_id());
+        Assert.assertNotNull(esMapping.getSql());
+
+        // Map<String, List<ESSyncConfig>> dbTableEsSyncConfig =
+        // ESSyncConfigLoader.getDbTableEsSyncConfig();
+        // Assert.assertFalse(dbTableEsSyncConfig.isEmpty());
+    }
+}

+ 47 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/SqlParseTest.java

@@ -0,0 +1,47 @@
+package com.alibaba.otter.canal.client.adapter.es.test;
+
+import java.util.List;
+import java.util.Map;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.FieldItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SchemaItem.TableItem;
+import com.alibaba.otter.canal.client.adapter.es.config.SqlParser;
+
+public class SqlParseTest {
+
+    @Test
+    public void parseTest() {
+        String sql = "select a.id, concat(a.name,'_test') as name, a.role_id, b.name as role_name, c.labels from user a "
+                     + "left join role b on a.role_id=b.id "
+                     + "left join (select user_id, group_concat(label,',') as labels from user_label "
+                     + "group by user_id) c on c.user_id=a.id";
+        SchemaItem schemaItem = SqlParser.parse(sql);
+
+        // 通过表名找 TableItem
+        List<TableItem> tableItems = schemaItem.getTableItemAliases().get("user_label".toLowerCase());
+        tableItems.forEach(tableItem -> Assert.assertEquals("c", tableItem.getAlias()));
+
+        TableItem tableItem = tableItems.get(0);
+        Assert.assertFalse(tableItem.isMain());
+        Assert.assertTrue(tableItem.isSubQuery());
+        // 通过字段名找 FieldItem
+        List<FieldItem> fieldItems = schemaItem.getColumnFields().get(tableItem.getAlias() + ".label".toLowerCase());
+        fieldItems.forEach(
+            fieldItem -> Assert.assertEquals("c.labels", fieldItem.getOwner() + "." + fieldItem.getFieldName()));
+
+        // 获取当前表关联条件字段
+        Map<FieldItem, List<FieldItem>> relationTableFields = tableItem.getRelationTableFields();
+        relationTableFields.keySet()
+            .forEach(fieldItem -> Assert.assertEquals("user_id", fieldItem.getColumn().getColumnName()));
+
+        // 获取关联字段在select中的对应字段
+        // List<FieldItem> relationSelectFieldItem =
+        // tableItem.getRelationKeyFieldItems();
+        // relationSelectFieldItem.forEach(fieldItem -> Assert.assertEquals("c.labels",
+        // fieldItem.getOwner() + "." + fieldItem.getColumn().getColumnName()));
+    }
+}

+ 40 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/TestConstant.java

@@ -0,0 +1,40 @@
+package com.alibaba.otter.canal.client.adapter.es.test;
+
+import java.sql.SQLException;
+
+import com.alibaba.druid.pool.DruidDataSource;
+
+public class TestConstant {
+
+    public final static String    jdbcUrl      = "jdbc:mysql://127.0.0.1:3306/mytest?useUnicode=true";
+    public final static String    jdbcUser     = "root";
+    public final static String    jdbcPassword = "121212";
+
+    public final static String    esHosts      = "127.0.0.1:9300";
+    public final static String    clusterNmae  = "elasticsearch";
+
+    public static DruidDataSource dataSource;
+
+    static {
+        dataSource = new DruidDataSource();
+        dataSource.setDriverClassName("com.mysql.jdbc.Driver");
+        dataSource.setUrl(jdbcUrl);
+        dataSource.setUsername(jdbcUser);
+        dataSource.setPassword(jdbcPassword);
+        dataSource.setInitialSize(1);
+        dataSource.setMinIdle(1);
+        dataSource.setMaxActive(1);
+        dataSource.setMaxWait(60000);
+        dataSource.setTimeBetweenEvictionRunsMillis(60000);
+        dataSource.setMinEvictableIdleTimeMillis(300000);
+        dataSource.setPoolPreparedStatements(false);
+        dataSource.setMaxPoolPreparedStatementPerConnectionSize(20);
+        dataSource.setValidationQuery("select 1");
+        try {
+            dataSource.init();
+        } catch (SQLException e) {
+            e.printStackTrace();
+        }
+    }
+
+}

+ 68 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/Common.java

@@ -0,0 +1,68 @@
+package com.alibaba.otter.canal.client.adapter.es.test.sync;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.sql.DataSource;
+
+import com.alibaba.otter.canal.client.adapter.es.ESAdapter;
+import com.alibaba.otter.canal.client.adapter.es.test.TestConstant;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig;
+
+public class Common {
+
+    public static ESAdapter init() {
+        DatasourceConfig.DATA_SOURCES.put("defaultDS", TestConstant.dataSource);
+
+        OuterAdapterConfig outerAdapterConfig = new OuterAdapterConfig();
+        outerAdapterConfig.setName("es");
+        outerAdapterConfig.setHosts(TestConstant.esHosts);
+        Map<String, String> properties = new HashMap<>();
+        properties.put("cluster.name", TestConstant.clusterNmae);
+        outerAdapterConfig.setProperties(properties);
+
+        ESAdapter esAdapter = new ESAdapter();
+        esAdapter.init(outerAdapterConfig);
+        return esAdapter;
+    }
+
+    public static void sqlExe(DataSource dataSource, String sql) {
+        Connection conn = null;
+        Statement stmt = null;
+        try {
+            conn = dataSource.getConnection();
+            conn.setAutoCommit(false);
+            stmt = conn.createStatement();
+            stmt.execute(sql);
+            conn.commit();
+        } catch (Exception e) {
+            if (conn != null) {
+                try {
+                    conn.rollback();
+                } catch (SQLException e1) {
+                    // ignore
+                }
+            }
+            e.printStackTrace();
+        } finally {
+            if (stmt != null) {
+                try {
+                    stmt.close();
+                } catch (SQLException e) {
+                    // ignore
+                }
+            }
+            if (conn != null) {
+                try {
+                    conn.close();
+                } catch (SQLException e) {
+                    // ignore
+                }
+            }
+        }
+    }
+}

+ 130 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/LabelSyncJoinSub2Test.java

@@ -0,0 +1,130 @@
+package com.alibaba.otter.canal.client.adapter.es.test.sync;
+
+import java.util.*;
+
+import javax.sql.DataSource;
+
+import org.elasticsearch.action.get.GetResponse;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.alibaba.otter.canal.client.adapter.es.ESAdapter;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+
+public class LabelSyncJoinSub2Test {
+
+    private ESAdapter esAdapter;
+
+    @Before
+    public void init() {
+        // AdapterConfigs.put("es", "mytest_user_join_sub2.yml");
+        esAdapter = Common.init();
+    }
+
+    /**
+     * 带函数子查询从表插入
+     */
+    @Test
+    public void test01() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "delete from label where id=1 or id=2");
+        Common.sqlExe(ds, "insert into label (id,user_id,label) values (1,1,'a')");
+        Common.sqlExe(ds, "insert into label (id,user_id,label) values (2,1,'b')");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("INSERT");
+        dml.setDatabase("mytest");
+        dml.setTable("label");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 2L);
+        data.put("user_id", 1L);
+        data.put("label", "b");
+        dml.setData(dataList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("b;a_", response.getSource().get("_labels"));
+    }
+
+    /**
+     * 带函数子查询从表更新
+     */
+    @Test
+    public void test02() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "update label set label='aa' where id=1");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("UPDATE");
+        dml.setDatabase("mytest");
+        dml.setTable("label");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("user_id", 1L);
+        data.put("label", "aa");
+        dml.setData(dataList);
+
+        List<Map<String, Object>> oldList = new ArrayList<>();
+        Map<String, Object> old = new LinkedHashMap<>();
+        oldList.add(old);
+        old.put("label", "v");
+        dml.setOld(oldList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("b;aa_", response.getSource().get("_labels"));
+    }
+
+    /**
+     * 带函数子查询从表删除
+     */
+    @Test
+    public void test03() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "delete from label where id=1");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("DELETE");
+        dml.setDatabase("mytest");
+        dml.setTable("label");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("user_id", 1L);
+        data.put("label", "a");
+        dml.setData(dataList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("b_", response.getSource().get("_labels"));
+    }
+}

+ 130 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/LabelSyncJoinSubTest.java

@@ -0,0 +1,130 @@
+package com.alibaba.otter.canal.client.adapter.es.test.sync;
+
+import java.util.*;
+
+import javax.sql.DataSource;
+
+import org.elasticsearch.action.get.GetResponse;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.alibaba.otter.canal.client.adapter.es.ESAdapter;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+
+public class LabelSyncJoinSubTest {
+
+    private ESAdapter esAdapter;
+
+    @Before
+    public void init() {
+        // AdapterConfigs.put("es", "mytest_user_join_sub.yml");
+        esAdapter = Common.init();
+    }
+
+    /**
+     * 子查询从表插入
+     */
+    @Test
+    public void test01() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "delete from label where id=1 or id=2");
+        Common.sqlExe(ds, "insert into label (id,user_id,label) values (1,1,'a')");
+        Common.sqlExe(ds, "insert into label (id,user_id,label) values (2,1,'b')");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("INSERT");
+        dml.setDatabase("mytest");
+        dml.setTable("label");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 2L);
+        data.put("user_id", 1L);
+        data.put("label", "b");
+        dml.setData(dataList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("b;a", response.getSource().get("_labels"));
+    }
+
+    /**
+     * 子查询从表更新
+     */
+    @Test
+    public void test02() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "update label set label='aa' where id=1");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("UPDATE");
+        dml.setDatabase("mytest");
+        dml.setTable("label");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("user_id", 1L);
+        data.put("label", "aa");
+        dml.setData(dataList);
+
+        List<Map<String, Object>> oldList = new ArrayList<>();
+        Map<String, Object> old = new LinkedHashMap<>();
+        oldList.add(old);
+        old.put("label", "a");
+        dml.setOld(oldList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("b;aa", response.getSource().get("_labels"));
+    }
+
+    /**
+     * 子查询从表删除
+     */
+    @Test
+    public void test03() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "delete from label where id=1");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("DELETE");
+        dml.setDatabase("mytest");
+        dml.setTable("label");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("user_id", 1L);
+        data.put("label", "a");
+        dml.setData(dataList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("b", response.getSource().get("_labels"));
+    }
+}

+ 95 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/RoleSyncJoinOne2Test.java

@@ -0,0 +1,95 @@
+package com.alibaba.otter.canal.client.adapter.es.test.sync;
+
+import java.util.*;
+
+import javax.sql.DataSource;
+
+import org.elasticsearch.action.get.GetResponse;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.alibaba.otter.canal.client.adapter.es.ESAdapter;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+
+public class RoleSyncJoinOne2Test {
+
+    private ESAdapter esAdapter;
+
+    @Before
+    public void init() {
+        // AdapterConfigs.put("es", "mytest_user_join_one2.yml");
+        esAdapter = Common.init();
+    }
+
+    /**
+     * 带函数非子查询从表插入
+     */
+    @Test
+    public void test01() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "delete from role where id=1");
+        Common.sqlExe(ds, "insert into role (id,role_name) values (1,'admin')");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("INSERT");
+        dml.setDatabase("mytest");
+        dml.setTable("role");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("role_name", "admin");
+        dml.setData(dataList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("admin_", response.getSource().get("_role_name"));
+    }
+
+    /**
+     * 带函数非子查询从表更新
+     */
+    @Test
+    public void test02() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "update role set role_name='admin3' where id=1");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("UPDATE");
+        dml.setDatabase("mytest");
+        dml.setTable("role");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("role_name", "admin3");
+        dml.setData(dataList);
+
+        List<Map<String, Object>> oldList = new ArrayList<>();
+        Map<String, Object> old = new LinkedHashMap<>();
+        oldList.add(old);
+        old.put("role_name", "admin");
+        dml.setOld(oldList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("admin3_", response.getSource().get("_role_name"));
+    }
+}

+ 191 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/RoleSyncJoinOneTest.java

@@ -0,0 +1,191 @@
+package com.alibaba.otter.canal.client.adapter.es.test.sync;
+
+import java.util.*;
+
+import javax.sql.DataSource;
+
+import org.elasticsearch.action.get.GetResponse;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.alibaba.otter.canal.client.adapter.es.ESAdapter;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+
+public class RoleSyncJoinOneTest {
+
+    private ESAdapter esAdapter;
+
+    @Before
+    public void init() {
+        // AdapterConfigs.put("es", "mytest_user_join_one.yml");
+        esAdapter = Common.init();
+    }
+
+    /**
+     * 非子查询从表插入
+     */
+    @Test
+    public void test01() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "delete from role where id=1");
+        Common.sqlExe(ds, "insert into role (id,role_name) values (1,'admin')");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("INSERT");
+        dml.setDatabase("mytest");
+        dml.setTable("role");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("role_name", "admin");
+
+        dml.setData(dataList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("admin", response.getSource().get("_role_name"));
+    }
+
+    /**
+     * 非子查询从表更新
+     */
+    @Test
+    public void test02() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "update role set role_name='admin2' where id=1");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("UPDATE");
+        dml.setDatabase("mytest");
+        dml.setTable("role");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("role_name", "admin2");
+        dml.setData(dataList);
+
+        List<Map<String, Object>> oldList = new ArrayList<>();
+        Map<String, Object> old = new LinkedHashMap<>();
+        oldList.add(old);
+        old.put("role_name", "admin");
+        dml.setOld(oldList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("admin2", response.getSource().get("_role_name"));
+    }
+
+    /**
+     * 主表更新外键值
+     */
+    @Test
+    public void test03() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "delete from role where id=2");
+        Common.sqlExe(ds, "insert into role (id,role_name) values (2,'operator')");
+        Common.sqlExe(ds, "update user set role_id=2 where id=1");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("UPDATE");
+        dml.setDatabase("mytest");
+        dml.setTable("user");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("role_id", 2L);
+        dml.setData(dataList);
+        List<Map<String, Object>> oldList = new ArrayList<>();
+        Map<String, Object> old = new LinkedHashMap<>();
+        oldList.add(old);
+        old.put("role_id", 1L);
+        dml.setOld(oldList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("operator", response.getSource().get("_role_name"));
+
+        Common.sqlExe(ds, "update user set role_id=1 where id=1");
+
+        Dml dml2 = new Dml();
+        dml2.setDestination("example");
+        dml2.setTs(new Date().getTime());
+        dml2.setType("UPDATE");
+        dml2.setDatabase("mytest");
+        dml2.setTable("user");
+        List<Map<String, Object>> dataList2 = new ArrayList<>();
+        Map<String, Object> data2 = new LinkedHashMap<>();
+        dataList2.add(data2);
+        data2.put("id", 1L);
+        data2.put("role_id", 1L);
+        dml2.setData(dataList2);
+        List<Map<String, Object>> oldList2 = new ArrayList<>();
+        Map<String, Object> old2 = new LinkedHashMap<>();
+        oldList2.add(old2);
+        old2.put("role_id", 2L);
+        dml2.setOld(oldList2);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml2);
+
+        GetResponse response2 = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("admin2", response2.getSource().get("_role_name"));
+    }
+
+    /**
+     * 非子查询从表删除
+     */
+    @Test
+    public void test04() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "delete from role where id=1");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("DELETE");
+        dml.setDatabase("mytest");
+        dml.setTable("role");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("role_name", "admin");
+
+        dml.setData(dataList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertNull(response.getSource().get("_role_name"));
+    }
+}

+ 96 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/UserSyncJoinOneTest.java

@@ -0,0 +1,96 @@
+package com.alibaba.otter.canal.client.adapter.es.test.sync;
+
+import java.util.*;
+
+import javax.sql.DataSource;
+
+import org.elasticsearch.action.get.GetResponse;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.alibaba.otter.canal.client.adapter.es.ESAdapter;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+
+public class UserSyncJoinOneTest {
+
+    private ESAdapter esAdapter;
+
+    @Before
+    public void init() {
+        // AdapterConfigs.put("es", "mytest_user_join_one.yml");
+        esAdapter = Common.init();
+    }
+
+    /**
+     * 主表带函数插入
+     */
+    @Test
+    public void test01() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "delete from user where id=1");
+        Common.sqlExe(ds, "insert into user (id,name,role_id) values (1,'Eric',1)");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("INSERT");
+        dml.setDatabase("mytest");
+        dml.setTable("user");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("name", "Eric");
+        data.put("role_id", 1L);
+        data.put("c_time", new Date());
+        dml.setData(dataList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("Eric_", response.getSource().get("_name"));
+    }
+
+    /**
+     * 主表带函数更新
+     */
+    @Test
+    public void test02() {
+        DataSource ds = DatasourceConfig.DATA_SOURCES.get("defaultDS");
+        Common.sqlExe(ds, "update user set name='Eric2' where id=1");
+
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("UPDATE");
+        dml.setDatabase("mytest");
+        dml.setTable("user");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("name", "Eric2");
+        dml.setData(dataList);
+        List<Map<String, Object>> oldList = new ArrayList<>();
+        Map<String, Object> old = new LinkedHashMap<>();
+        oldList.add(old);
+        old.put("name", "Eric");
+        dml.setOld(oldList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("Eric2_", response.getSource().get("_name"));
+    }
+}

+ 122 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/UserSyncSingleTest.java

@@ -0,0 +1,122 @@
+package com.alibaba.otter.canal.client.adapter.es.test.sync;
+
+import java.util.*;
+
+import org.elasticsearch.action.get.GetResponse;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.alibaba.otter.canal.client.adapter.es.ESAdapter;
+import com.alibaba.otter.canal.client.adapter.es.config.ESSyncConfig;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+
+public class UserSyncSingleTest {
+
+    private ESAdapter esAdapter;
+
+    @Before
+    public void init() {
+        // AdapterConfigs.put("es", "mytest_user_single.yml");
+        esAdapter = Common.init();
+    }
+
+    /**
+     * 单表插入
+     */
+    @Test
+    public void test01() {
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("INSERT");
+        dml.setDatabase("mytest");
+        dml.setTable("user");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("name", "Eric");
+        data.put("role_id", 1L);
+        data.put("c_time", new Date());
+        dml.setData(dataList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("Eric", response.getSource().get("_name"));
+    }
+
+    /**
+     * 单表更新
+     */
+    @Test
+    public void test02() {
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("UPDATE");
+        dml.setDatabase("mytest");
+        dml.setTable("user");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("name", "Eric2");
+        dml.setData(dataList);
+        List<Map<String, Object>> oldList = new ArrayList<>();
+        Map<String, Object> old = new LinkedHashMap<>();
+        oldList.add(old);
+        old.put("name", "Eric");
+        dml.setOld(oldList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertEquals("Eric2", response.getSource().get("_name"));
+    }
+
+    /**
+     * 单表删除
+     */
+    @Test
+    public void test03() {
+        Dml dml = new Dml();
+        dml.setDestination("example");
+        dml.setTs(new Date().getTime());
+        dml.setType("DELETE");
+        dml.setDatabase("mytest");
+        dml.setTable("user");
+        List<Map<String, Object>> dataList = new ArrayList<>();
+        Map<String, Object> data = new LinkedHashMap<>();
+        dataList.add(data);
+        data.put("id", 1L);
+        data.put("name", "Eric");
+        data.put("role_id", 1L);
+        data.put("c_time", new Date());
+        dml.setData(dataList);
+
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, ESSyncConfig> esSyncConfigs = esAdapter.getDbTableEsSyncConfig().get(database + "-" + table);
+
+        esAdapter.getEsSyncService().sync(esSyncConfigs.values(), dml);
+
+        GetResponse response = esAdapter.getTransportClient().prepareGet("mytest_user", "_doc", "1").get();
+        Assert.assertNull(response.getSource());
+    }
+
+    // @After
+    // public void after() {
+    // esAdapter.destroy();
+    // DatasourceConfig.DATA_SOURCES.values().forEach(DruidDataSource::close);
+    // }
+}

+ 39 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/db_schema.sql

@@ -0,0 +1,39 @@
+-- ----------------------------
+-- Table structure for label
+-- ----------------------------
+DROP TABLE IF EXISTS `label`;
+CREATE TABLE `label` (
+  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `user_id` bigint(20) NOT NULL,
+  `label` varchar(30) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
+
+-- ----------------------------
+-- Table structure for role
+-- ----------------------------
+DROP TABLE IF EXISTS `role`;
+CREATE TABLE `role` (
+  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `role_name` varchar(30) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
+
+-- ----------------------------
+-- Table structure for user
+-- ----------------------------
+DROP TABLE IF EXISTS `user`;
+CREATE TABLE `user` (
+  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `name` varchar(30) NOT NULL,
+  `c_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  `role_id` bigint(20) DEFAULT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
+
+insert into user (id,name,role_id) values (1,'Eric',1);
+insert into role (id,role_name) values (1,'admin');
+insert into role (id,role_name) values (2,'operator');
+insert into label (id,user_id,label) values (1,1,'a');
+insert into label (id,user_id,label) values (2,1,'b');
+commit;

+ 21 - 0
client-adapter/elasticsearch/src/test/java/com/alibaba/otter/canal/client/adapter/es/test/sync/es_mapping.json

@@ -0,0 +1,21 @@
+{
+  "_doc": {
+    "properties": {
+      "_name": {
+        "type": "text"
+      },
+      "_role_id": {
+        "type": "long"
+      },
+      "_role_name": {
+        "type": "text"
+      },
+      "_labels": {
+        "type": "text"
+      },
+      "_c_time": {
+        "type": "date"
+      }
+    }
+  }
+}

+ 8 - 0
client-adapter/elasticsearch/src/test/resources/es/mytest_user_single.yml_

@@ -0,0 +1,8 @@
+dataSourceKey: defaultDS
+destination: example
+esMapping:
+  _index: mytest_user
+  _type: _doc
+  _id: _id
+  sql: "select a.id as _id, a.name as _name, a.role_id as _role_id, a.c_time as _c_time from user a"
+  commitBatch: 3000

+ 13 - 0
client-adapter/elasticsearch/src/test/resources/log4j2-test.xml

@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Configuration status="WARN">
+    <Appenders>
+        <Console name="Console" target="SYSTEM_OUT">
+            <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
+        </Console>
+    </Appenders>
+    <Loggers>
+        <Root level="ERROR">
+            <AppenderRef ref="Console"/>
+        </Root>
+    </Loggers>
+</Configuration>

+ 1 - 7
kafka-client/src/test/resources/logback.xml → client-adapter/elasticsearch/src/test/resources/logback-test.xml

@@ -1,5 +1,4 @@
 <configuration scan="true" scanPeriod=" 5 seconds">
-
 	<jmxConfigurator />
 	<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
 		<encoder>
@@ -8,12 +7,7 @@
 		</encoder>
 	</appender>
 
-	<logger name="org.apache.kafka" additivity="false">
-		<level value="ERROR" />
-		<appender-ref ref="STDOUT" />
-	</logger>
-
-	<root level="INFO">
+	<root level="TRACE">
 		<appender-ref ref="STDOUT"/>
 	</root>
 </configuration>

+ 88 - 0
client-adapter/hbase/pom.xml

@@ -0,0 +1,88 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>canal.client-adapter</artifactId>
+        <groupId>com.alibaba.otter</groupId>
+        <version>1.1.3-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>com.alibaba.otter</groupId>
+    <artifactId>client-adapter.hbase</artifactId>
+    <packaging>jar</packaging>
+    <name>canal client adapter hbase module for otter ${project.version}</name>
+    <dependencies>
+        <dependency>
+            <groupId>com.alibaba.otter</groupId>
+            <artifactId>client-adapter.common</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.yaml</groupId>
+            <artifactId>snakeyaml</artifactId>
+            <version>1.19</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+            <version>1.1.2</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>12.0.1</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.4</version>
+                <configuration>
+                    <descriptorRefs>
+                        <descriptorRef>jar-with-dependencies</descriptorRef>
+                    </descriptorRefs>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <artifactId>maven-antrun-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>run</goal>
+                        </goals>
+                        <configuration>
+                            <tasks>
+                                <copy todir="${project.basedir}/../launcher/target/classes/hbase" overwrite="true">
+                                    <fileset dir="${project.basedir}/target/classes/hbase" erroronmissingdir="true">
+                                        <include name="*.yml" />
+                                    </fileset>
+                                </copy>
+                            </tasks>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>

+ 213 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/HbaseAdapter.java

@@ -0,0 +1,213 @@
+package com.alibaba.otter.canal.client.adapter.hbase;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.otter.canal.client.adapter.OuterAdapter;
+import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfigLoader;
+import com.alibaba.otter.canal.client.adapter.hbase.monitor.HbaseConfigMonitor;
+import com.alibaba.otter.canal.client.adapter.hbase.service.HbaseEtlService;
+import com.alibaba.otter.canal.client.adapter.hbase.service.HbaseSyncService;
+import com.alibaba.otter.canal.client.adapter.hbase.support.HbaseTemplate;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+import com.alibaba.otter.canal.client.adapter.support.EtlResult;
+import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig;
+import com.alibaba.otter.canal.client.adapter.support.SPI;
+
+/**
+ * HBase外部适配器
+ *
+ * @author machengyuan 2018-8-21 下午8:45:38
+ * @version 1.0.0
+ */
+@SPI("hbase")
+public class HbaseAdapter implements OuterAdapter {
+
+    private static Logger                           logger             = LoggerFactory.getLogger(HbaseAdapter.class);
+
+    private Map<String, MappingConfig>              hbaseMapping       = new ConcurrentHashMap<>();                  // 文件名对应配置
+    private Map<String, Map<String, MappingConfig>> mappingConfigCache = new ConcurrentHashMap<>();                  // 库名-表名对应配置
+
+    private Connection                              conn;
+    private HbaseSyncService                        hbaseSyncService;
+    private HbaseTemplate                           hbaseTemplate;
+
+    private HbaseConfigMonitor                      configMonitor;
+
+    public Map<String, MappingConfig> getHbaseMapping() {
+        return hbaseMapping;
+    }
+
+    public Map<String, Map<String, MappingConfig>> getMappingConfigCache() {
+        return mappingConfigCache;
+    }
+
+    @Override
+    public void init(OuterAdapterConfig configuration) {
+        try {
+            Map<String, MappingConfig> hbaseMappingTmp = MappingConfigLoader.load();
+            // 过滤不匹配的key的配置
+            hbaseMappingTmp.forEach((key, mappingConfig) -> {
+                if ((mappingConfig.getOuterAdapterKey() == null && configuration.getKey() == null)
+                    || (mappingConfig.getOuterAdapterKey() != null
+                        && mappingConfig.getOuterAdapterKey().equalsIgnoreCase(configuration.getKey()))) {
+                    hbaseMapping.put(key, mappingConfig);
+                }
+            });
+            for (Map.Entry<String, MappingConfig> entry : hbaseMapping.entrySet()) {
+                String configName = entry.getKey();
+                MappingConfig mappingConfig = entry.getValue();
+                String k = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
+                           + mappingConfig.getHbaseMapping().getDatabase() + "."
+                           + mappingConfig.getHbaseMapping().getTable();
+                Map<String, MappingConfig> configMap = mappingConfigCache.computeIfAbsent(k, k1 -> new HashMap<>());
+                configMap.put(configName, mappingConfig);
+            }
+
+            Map<String, String> properties = configuration.getProperties();
+
+            Configuration hbaseConfig = HBaseConfiguration.create();
+            properties.forEach(hbaseConfig::set);
+            conn = ConnectionFactory.createConnection(hbaseConfig);
+            hbaseTemplate = new HbaseTemplate(conn);
+            hbaseSyncService = new HbaseSyncService(hbaseTemplate);
+
+            configMonitor = new HbaseConfigMonitor();
+            configMonitor.init(this);
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void sync(List<Dml> dmls) {
+        for (Dml dml : dmls) {
+            sync(dml);
+        }
+    }
+
+    public void sync(Dml dml) {
+        if (dml == null) {
+            return;
+        }
+        String destination = StringUtils.trimToEmpty(dml.getDestination());
+        String database = dml.getDatabase();
+        String table = dml.getTable();
+        Map<String, MappingConfig> configMap = mappingConfigCache.get(destination + "." + database + "." + table);
+        configMap.values().forEach(config -> hbaseSyncService.sync(config, dml));
+    }
+
+    @Override
+    public EtlResult etl(String task, List<String> params) {
+        EtlResult etlResult = new EtlResult();
+        MappingConfig config = hbaseMapping.get(task);
+        if (config != null) {
+            DataSource dataSource = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+            if (dataSource != null) {
+                return HbaseEtlService.importData(dataSource, hbaseTemplate, config, params);
+            } else {
+                etlResult.setSucceeded(false);
+                etlResult.setErrorMessage("DataSource not found");
+                return etlResult;
+            }
+        } else {
+            StringBuilder resultMsg = new StringBuilder();
+            boolean resSucc = true;
+            // ds不为空说明传入的是datasourceKey
+            for (MappingConfig configTmp : hbaseMapping.values()) {
+                // 取所有的destination为task的配置
+                if (configTmp.getDestination().equals(task)) {
+                    DataSource dataSource = DatasourceConfig.DATA_SOURCES.get(configTmp.getDataSourceKey());
+                    if (dataSource == null) {
+                        continue;
+                    }
+                    EtlResult etlRes = HbaseEtlService.importData(dataSource, hbaseTemplate, configTmp, params);
+                    if (!etlRes.getSucceeded()) {
+                        resSucc = false;
+                        resultMsg.append(etlRes.getErrorMessage()).append("\n");
+                    } else {
+                        resultMsg.append(etlRes.getResultMessage()).append("\n");
+                    }
+                }
+            }
+            if (resultMsg.length() > 0) {
+                etlResult.setSucceeded(resSucc);
+                if (resSucc) {
+                    etlResult.setResultMessage(resultMsg.toString());
+                } else {
+                    etlResult.setErrorMessage(resultMsg.toString());
+                }
+                return etlResult;
+            }
+        }
+        etlResult.setSucceeded(false);
+        etlResult.setErrorMessage("Task not found");
+        return etlResult;
+    }
+
+    @Override
+    public Map<String, Object> count(String task) {
+        MappingConfig config = hbaseMapping.get(task);
+        String hbaseTable = config.getHbaseMapping().getHbaseTable();
+        long rowCount = 0L;
+        try {
+            HTable table = (HTable) conn.getTable(TableName.valueOf(hbaseTable));
+            Scan scan = new Scan();
+            scan.setFilter(new FirstKeyOnlyFilter());
+            ResultScanner resultScanner = table.getScanner(scan);
+            for (Result result : resultScanner) {
+                rowCount += result.size();
+            }
+        } catch (IOException e) {
+            logger.error(e.getMessage(), e);
+        }
+        Map<String, Object> res = new LinkedHashMap<>();
+        res.put("hbaseTable", hbaseTable);
+        res.put("count", rowCount);
+        return res;
+    }
+
+    @Override
+    public void destroy() {
+        if (configMonitor != null) {
+            configMonitor.destroy();
+        }
+        if (conn != null) {
+            try {
+                conn.close();
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+
+    @Override
+    public String getDestination(String task) {
+        MappingConfig config = hbaseMapping.get(task);
+        if (config != null && config.getHbaseMapping() != null) {
+            return config.getDestination();
+        }
+        return null;
+    }
+}

+ 394 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/config/MappingConfig.java

@@ -0,0 +1,394 @@
+package com.alibaba.otter.canal.client.adapter.hbase.config;
+
+import java.util.*;
+
/**
 * HBase table mapping configuration, deserialized from a yml mapping file.
 *
 * @author rewerma 2018-8-21 下午06:45:49
 * @version 1.0.0
 */
public class MappingConfig {

    private String       dataSourceKey;   // data source key

    private String       outerAdapterKey; // adapter key

    private String       destination;     // canal instance or MQ topic

    private HbaseMapping hbaseMapping;    // hbase mapping configuration

    public String getDataSourceKey() {
        return dataSourceKey;
    }

    public void setDataSourceKey(String dataSourceKey) {
        this.dataSourceKey = dataSourceKey;
    }

    public String getOuterAdapterKey() {
        return outerAdapterKey;
    }

    public void setOuterAdapterKey(String outerAdapterKey) {
        this.outerAdapterKey = outerAdapterKey;
    }

    public String getDestination() {
        return destination;
    }

    public void setDestination(String destination) {
        this.destination = destination;
    }

    public HbaseMapping getHbaseMapping() {
        return hbaseMapping;
    }

    public void setHbaseMapping(HbaseMapping hbaseMapping) {
        this.hbaseMapping = hbaseMapping;
    }

    /**
     * Validates that all mandatory mapping fields are present.
     *
     * @throws NullPointerException if a mandatory field (or the whole
     *             hbaseMapping section) is missing, with the field name as message
     * @throws RuntimeException if both a composite rowKey and a rowKey column
     *             are configured (mutually exclusive)
     */
    public void validate() {
        // explicit check: previously a missing hbaseMapping section produced a
        // bare NPE with no message, which was hard to diagnose from logs
        if (hbaseMapping == null) {
            throw new NullPointerException("hbaseMapping");
        }
        if (hbaseMapping.database == null || hbaseMapping.database.isEmpty()) {
            throw new NullPointerException("hbaseMapping.database");
        }
        if (hbaseMapping.table == null || hbaseMapping.table.isEmpty()) {
            throw new NullPointerException("hbaseMapping.table");
        }
        if (hbaseMapping.hbaseTable == null || hbaseMapping.hbaseTable.isEmpty()) {
            throw new NullPointerException("hbaseMapping.hbaseTable");
        }
        if (hbaseMapping.mode == null) {
            throw new NullPointerException("hbaseMapping.mode");
        }
        if (hbaseMapping.rowKey != null && hbaseMapping.rowKeyColumn != null) {
            throw new RuntimeException("已配置了复合主键作为RowKey,无需再指定RowKey列");
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        MappingConfig config = (MappingConfig) o;

        // identity of a mapping config is its hbase mapping section only
        return Objects.equals(hbaseMapping, config.hbaseMapping);
    }

    @Override
    public int hashCode() {
        return hbaseMapping != null ? hbaseMapping.hashCode() : 0;
    }

    /** One source column mapped to an HBase cell (or elected as the rowKey). */
    public static class ColumnItem {

        private boolean isRowKey = false; // true when this column is the rowKey
        private Integer rowKeyLen;        // optional zero-pad length for numeric rowKeys
        private String  column;           // source column name
        private String  family;           // target column family
        private String  qualifier;        // target qualifier
        private String  type;             // optional target type (after '$' in the yml value)

        public boolean isRowKey() {
            return isRowKey;
        }

        public void setRowKey(boolean rowKey) {
            isRowKey = rowKey;
        }

        public Integer getRowKeyLen() {
            return rowKeyLen;
        }

        public void setRowKeyLen(Integer rowKeyLen) {
            this.rowKeyLen = rowKeyLen;
        }

        public String getColumn() {
            return column;
        }

        public void setColumn(String column) {
            this.column = column;
        }

        public String getFamily() {
            return family;
        }

        public void setFamily(String family) {
            this.family = family;
        }

        public String getQualifier() {
            return qualifier;
        }

        public void setQualifier(String qualifier) {
            this.qualifier = qualifier;
        }

        public String getType() {
            return type;
        }

        public void setType(String type) {
            this.type = type;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            ColumnItem that = (ColumnItem) o;
            // equality is by source column name only
            return Objects.equals(column, that.column);
        }

        @Override
        public int hashCode() {
            return Objects.hash(column);
        }
    }

    /** Value serialization mode when writing cells to HBase. */
    public enum Mode {
        STRING("STRING"), NATIVE("NATIVE"), PHOENIX("PHOENIX");

        private final String type;

        Mode(String type){
            this.type = type;
        }

        public String getType() {
            return type;
        }
    }

    /** The hbaseMapping section of a mapping yml file. */
    public static class HbaseMapping {

        private Mode                    mode               = Mode.STRING;           // default hbase value conversion mode
        private String                  database;                                   // source database or schema name
        private String                  table;                                      // source table name
        private String                  hbaseTable;                                 // target hbase table name
        private String                  family             = "CF";                  // default column family
        private boolean                 uppercaseQualifier = true;                  // uppercase qualifiers?
        private boolean                 autoCreateTable    = false;                 // auto-create missing hbase table on sync
        private String                  rowKey;                                     // composite primary key used as rowKey
        private Map<String, String>     columns;                                    // column mapping
        private List<String>            excludeColumns;                             // columns not to map
        private ColumnItem              rowKeyColumn;                               // single rowKey column
        private String                  etlCondition;                               // etl condition sql fragment

        private Map<String, ColumnItem> columnItems        = new LinkedHashMap<>(); // parsed column mapping items
        private Set<String>             families           = new LinkedHashSet<>(); // column families in use
        private int                     readBatch          = 5000;
        private int                     commitBatch        = 5000;                  // etl batch commit size

        public Mode getMode() {
            return mode;
        }

        public void setMode(Mode mode) {
            this.mode = mode;
        }

        public String getDatabase() {
            return database;
        }

        public void setDatabase(String database) {
            this.database = database;
        }

        public String getTable() {
            return table;
        }

        public void setTable(String table) {
            this.table = table;
        }

        public String getHbaseTable() {
            return hbaseTable;
        }

        public void setHbaseTable(String hbaseTable) {
            this.hbaseTable = hbaseTable;
        }

        public Map<String, String> getColumns() {
            return columns;
        }

        public boolean isAutoCreateTable() {
            return autoCreateTable;
        }

        public void setAutoCreateTable(boolean autoCreateTable) {
            this.autoCreateTable = autoCreateTable;
        }

        public int getReadBatch() {
            return readBatch;
        }

        public void setReadBatch(int readBatch) {
            this.readBatch = readBatch;
        }

        public int getCommitBatch() {
            return commitBatch;
        }

        public void setCommitBatch(int commitBatch) {
            this.commitBatch = commitBatch;
        }

        public String getRowKey() {
            return rowKey;
        }

        public void setRowKey(String rowKey) {
            this.rowKey = rowKey;
        }

        public String getEtlCondition() {
            return etlCondition;
        }

        public void setEtlCondition(String etlCondition) {
            this.etlCondition = etlCondition;
        }

        /**
         * Parses the raw column mapping into {@link ColumnItem}s.
         * <p>
         * Value grammar per entry: {@code [family:]qualifier[$type]} maps the
         * column to a cell; an empty value uses the default family and the
         * column name as qualifier; {@code ROWKEY[LEN:n]} elects the column as
         * the rowKey (optionally padded to length n).
         */
        public void setColumns(Map<String, String> columns) {
            this.columns = columns;

            if (columns != null) {
                for (Map.Entry<String, String> columnField : columns.entrySet()) {
                    String field = columnField.getValue();
                    String type = null;
                    if (field != null) {
                        // split off the optional "$type" suffix
                        int i = field.indexOf("$");
                        if (i > -1) {
                            type = field.substring(i + 1);
                            field = field.substring(0, i);
                        }
                    }
                    ColumnItem columnItem = new ColumnItem();
                    columnItem.setColumn(columnField.getKey());
                    columnItem.setType(type);
                    if (field != null && field.toUpperCase().startsWith("ROWKEY")) {
                        // rowKey column, optionally with "LEN:n" padding hint
                        int idx = field.toUpperCase().indexOf("LEN:");
                        if (idx > -1) {
                            String len = field.substring(idx + 4);
                            try {
                                columnItem.setRowKeyLen(Integer.parseInt(len));
                            } catch (Exception e) {
                                // ignore: malformed length leaves rowKeyLen unset
                            }
                        }
                        columnItem.setRowKey(true);
                        rowKeyColumn = columnItem;
                    } else {
                        if (field == null || field.equals("")) {
                            // no explicit target: default family, column name as qualifier
                            columnItem.setFamily(family);
                            columnItem.setQualifier(columnField.getKey());
                        } else {
                            int len = field.indexOf(":");
                            if (len > -1) {
                                columnItem.setFamily(field.substring(0, len));
                                columnItem.setQualifier(field.substring(len + 1));
                            } else {
                                columnItem.setFamily(family);
                                columnItem.setQualifier(field);
                            }
                        }
                        if (uppercaseQualifier) {
                            columnItem.setQualifier(columnItem.getQualifier().toUpperCase());
                        }
                        families.add(columnItem.getFamily());
                    }

                    columnItems.put(columnField.getKey(), columnItem);
                }
            } else {
                this.columns = new LinkedHashMap<>();
            }
        }

        public List<String> getExcludeColumns() {
            return excludeColumns;
        }

        public void setExcludeColumns(List<String> excludeColumns) {
            this.excludeColumns = excludeColumns;
        }

        public String getFamily() {
            return family;
        }

        public void setFamily(String family) {
            this.family = family;
            if (family == null) {
                // null in the yml falls back to the default family
                this.family = "CF";
            }
        }

        public boolean isUppercaseQualifier() {
            return uppercaseQualifier;
        }

        public void setUppercaseQualifier(boolean uppercaseQualifier) {
            this.uppercaseQualifier = uppercaseQualifier;
        }

        public ColumnItem getRowKeyColumn() {
            return rowKeyColumn;
        }

        public void setRowKeyColumn(ColumnItem rowKeyColumn) {
            this.rowKeyColumn = rowKeyColumn;
        }

        public Map<String, ColumnItem> getColumnItems() {
            return columnItems;
        }

        public void setColumnItems(Map<String, ColumnItem> columnItems) {
            this.columnItems = columnItems;
        }

        public Set<String> getFamilies() {
            return families;
        }

        public void setFamilies(Set<String> families) {
            this.families = families;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            HbaseMapping hbaseMapping = (HbaseMapping) o;

            // identity is the (source table, hbase table) pair
            if (table != null ? !table.equals(hbaseMapping.table) : hbaseMapping.table != null) return false;
            return hbaseTable != null ? hbaseTable.equals(hbaseMapping.hbaseTable) : hbaseMapping.hbaseTable == null;
        }

        @Override
        public int hashCode() {
            int result = table != null ? table.hashCode() : 0;
            result = 31 * result + (hbaseTable != null ? hbaseTable.hashCode() : 0);
            return result;
        }
    }
}

+ 46 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/config/MappingConfigLoader.java

@@ -0,0 +1,46 @@
+package com.alibaba.otter.canal.client.adapter.hbase.config;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
+
+/**
+ * HBase表映射配置加载器
+ *
+ * @author rewerma 2018-8-21 下午06:45:49
+ * @version 1.0.0
+ */
+public class MappingConfigLoader {
+
+    private static Logger logger = LoggerFactory.getLogger(MappingConfigLoader.class);
+
+    /**
+     * 加载HBase表映射配置
+     * 
+     * @return 配置名/配置文件名--对象
+     */
+    public static Map<String, MappingConfig> load() {
+        logger.info("## Start loading hbase mapping config ... ");
+
+        Map<String, MappingConfig> result = new LinkedHashMap<>();
+
+        Map<String, String> configContentMap = MappingConfigsLoader.loadConfigs("hbase");
+        configContentMap.forEach((fileName, content) -> {
+            MappingConfig config = new Yaml().loadAs(content, MappingConfig.class);
+            try {
+                config.validate();
+            } catch (Exception e) {
+                throw new RuntimeException("ERROR load Config: " + fileName + " " + e.getMessage(), e);
+            }
+            result.put(fileName, config);
+        });
+
+        logger.info("## Hbase mapping config loaded");
+        return result;
+    }
+}

+ 129 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/monitor/HbaseConfigMonitor.java

@@ -0,0 +1,129 @@
+package com.alibaba.otter.canal.client.adapter.hbase.monitor;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.io.filefilter.FileFilterUtils;
+import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
+import org.apache.commons.io.monitor.FileAlterationMonitor;
+import org.apache.commons.io.monitor.FileAlterationObserver;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import com.alibaba.otter.canal.client.adapter.hbase.HbaseAdapter;
+import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+
+public class HbaseConfigMonitor {
+
+    private static final Logger   logger      = LoggerFactory.getLogger(HbaseConfigMonitor.class);
+
+    private static final String   adapterName = "hbase";
+
+    private HbaseAdapter          hbaseAdapter;
+
+    private FileAlterationMonitor fileMonitor;
+
+    public void init(HbaseAdapter hbaseAdapter) {
+        this.hbaseAdapter = hbaseAdapter;
+        File confDir = Util.getConfDirPath(adapterName);
+        try {
+            FileAlterationObserver observer = new FileAlterationObserver(confDir,
+                FileFilterUtils.and(FileFilterUtils.fileFileFilter(), FileFilterUtils.suffixFileFilter("yml")));
+            FileListener listener = new FileListener();
+            observer.addListener(listener);
+            fileMonitor = new FileAlterationMonitor(3000, observer);
+            fileMonitor.start();
+
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    public void destroy() {
+        try {
+            fileMonitor.stop();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    private class FileListener extends FileAlterationListenerAdaptor {
+
+        @Override
+        public void onFileCreate(File file) {
+            super.onFileCreate(file);
+            try {
+                // 加载新增的配置文件
+                String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName());
+                MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
+                config.validate();
+                addConfigToCache(file, config);
+
+                logger.info("Add a new hbase mapping config: {} to canal adapter", file.getName());
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        @Override
+        public void onFileChange(File file) {
+            super.onFileChange(file);
+
+            try {
+                if (hbaseAdapter.getHbaseMapping().containsKey(file.getName())) {
+                    // 加载配置文件
+                    String configContent = MappingConfigsLoader
+                        .loadConfig(adapterName + File.separator + file.getName());
+                    MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
+                    config.validate();
+                    if (hbaseAdapter.getHbaseMapping().containsKey(file.getName())) {
+                        deleteConfigFromCache(file);
+                    }
+                    addConfigToCache(file, config);
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        @Override
+        public void onFileDelete(File file) {
+            super.onFileDelete(file);
+
+            try {
+                if (hbaseAdapter.getHbaseMapping().containsKey(file.getName())) {
+                    deleteConfigFromCache(file);
+
+                    logger.info("Delete a hbase mapping config: {} of canal adapter", file.getName());
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        private void addConfigToCache(File file, MappingConfig config) {
+            hbaseAdapter.getHbaseMapping().put(file.getName(), config);
+            Map<String, MappingConfig> configMap = hbaseAdapter.getMappingConfigCache()
+                .computeIfAbsent(StringUtils.trimToEmpty(config.getDestination()) + "."
+                                 + config.getHbaseMapping().getDatabase() + "." + config.getHbaseMapping().getTable(),
+                    k1 -> new HashMap<>());
+            configMap.put(file.getName(), config);
+        }
+
+        private void deleteConfigFromCache(File file) {
+
+            hbaseAdapter.getHbaseMapping().remove(file.getName());
+            for (Map<String, MappingConfig> configMap : hbaseAdapter.getMappingConfigCache().values()) {
+                if (configMap != null) {
+                    configMap.remove(file.getName());
+                }
+            }
+
+        }
+    }
+}

+ 392 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseEtlService.java

@@ -0,0 +1,392 @@
+package com.alibaba.otter.canal.client.adapter.hbase.service;
+
+import java.sql.*;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.function.Function;
+
+import javax.sql.DataSource;
+
+import com.alibaba.otter.canal.client.adapter.support.Util;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.hbase.support.*;
+import com.alibaba.otter.canal.client.adapter.support.EtlResult;
+import com.alibaba.otter.canal.client.adapter.support.JdbcTypeUtil;
+import com.google.common.base.Joiner;
+
+/**
+ * HBase ETL 操作业务类
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+public class HbaseEtlService {
+
+    private static Logger logger = LoggerFactory.getLogger(HbaseEtlService.class);
+
+
+    /**
+     * 建表
+     * 
+     * @param hbaseTemplate
+     * @param config
+     */
+    public static void createTable(HbaseTemplate hbaseTemplate, MappingConfig config) {
+        try {
+            // 判断hbase表是否存在,不存在则建表
+            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
+            if (!hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
+                hbaseTemplate.createTable(hbaseMapping.getHbaseTable(), hbaseMapping.getFamily());
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * 导入数据
+     * 
+     * @param ds 数据源
+     * @param hbaseTemplate hbaseTemplate
+     * @param config 配置
+     * @param params 筛选条件
+     * @return 导入结果
+     */
+    public static EtlResult importData(DataSource ds, HbaseTemplate hbaseTemplate, MappingConfig config,
+                                       List<String> params) {
+        EtlResult etlResult = new EtlResult();
+        AtomicLong successCount = new AtomicLong();
+        List<String> errMsg = new ArrayList<>();
+        String hbaseTable = "";
+        try {
+            if (config == null) {
+                logger.error("Config is null!");
+                etlResult.setSucceeded(false);
+                etlResult.setErrorMessage("Config is null!");
+                return etlResult;
+            }
+            MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
+            hbaseTable = hbaseMapping.getHbaseTable();
+
+            long start = System.currentTimeMillis();
+
+            if (params != null && params.size() == 1 && "rebuild".equalsIgnoreCase(params.get(0))) {
+                logger.info(hbaseMapping.getHbaseTable() + " rebuild is starting!");
+                // 如果表存在则删除
+                if (hbaseTemplate.tableExists(hbaseMapping.getHbaseTable())) {
+                    hbaseTemplate.disableTable(hbaseMapping.getHbaseTable());
+                    hbaseTemplate.deleteTable(hbaseMapping.getHbaseTable());
+                }
+                params = null;
+            } else {
+                logger.info(hbaseMapping.getHbaseTable() + " etl is starting!");
+            }
+            createTable(hbaseTemplate, config);
+
+            // 拼接sql
+            String sql = "SELECT * FROM " + config.getHbaseMapping().getDatabase() + "." + hbaseMapping.getTable();
+
+            // 拼接条件
+            if (params != null && params.size() == 1 && hbaseMapping.getEtlCondition() == null) {
+                AtomicBoolean stExists = new AtomicBoolean(false);
+                // 验证是否有SYS_TIME字段
+                Util.sqlRS(ds, sql, rs -> {
+                    try {
+                        ResultSetMetaData rsmd = rs.getMetaData();
+                        int cnt = rsmd.getColumnCount();
+                        for (int i = 1; i <= cnt; i++) {
+                            String columnName = rsmd.getColumnName(i);
+                            if ("SYS_TIME".equalsIgnoreCase(columnName)) {
+                                stExists.set(true);
+                                break;
+                            }
+                        }
+                    } catch (Exception e) {
+                        // ignore
+                    }
+                    return null;
+                });
+                if (stExists.get()) {
+                    sql += " WHERE SYS_TIME >= '" + params.get(0) + "' ";
+                }
+            } else if (hbaseMapping.getEtlCondition() != null && params != null) {
+                String etlCondition = hbaseMapping.getEtlCondition();
+                int size = params.size();
+                for (int i = 0; i < size; i++) {
+                    etlCondition = etlCondition.replace("{" + i + "}", params.get(i));
+                }
+
+                sql += " " + etlCondition;
+            }
+
+            // 获取总数
+            String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
+            long cnt = (Long) Util.sqlRS(ds, countSql, rs -> {
+                Long count = null;
+                try {
+                    if (rs.next()) {
+                        count = ((Number) rs.getObject(1)).longValue();
+                    }
+                } catch (Exception e) {
+                    logger.error(e.getMessage(), e);
+                }
+                return count == null ? 0 : count;
+            });
+
+            // 当大于1万条记录时开启多线程
+            if (cnt >= 10000) {
+                int threadCount = 3;
+                long perThreadCnt = cnt / threadCount;
+                ExecutorService executor = Executors.newFixedThreadPool(threadCount);
+                List<Future<Boolean>> futures = new ArrayList<>(threadCount);
+                for (int i = 0; i < threadCount; i++) {
+                    long offset = i * perThreadCnt;
+                    Long size = null;
+                    if (i != threadCount - 1) {
+                        size = perThreadCnt;
+                    }
+                    String sqlFinal;
+                    if (size != null) {
+                        sqlFinal = sql + " LIMIT " + offset + "," + size;
+                    } else {
+                        sqlFinal = sql + " LIMIT " + offset + "," + cnt;
+                    }
+                    Future<Boolean> future = executor.submit(
+                        () -> executeSqlImport(ds, sqlFinal, hbaseMapping, hbaseTemplate, successCount, errMsg));
+                    futures.add(future);
+                }
+
+                for (Future<Boolean> future : futures) {
+                    future.get();
+                }
+
+                executor.shutdown();
+            } else {
+                executeSqlImport(ds, sql, hbaseMapping, hbaseTemplate, successCount, errMsg);
+            }
+
+            logger.info(hbaseMapping.getHbaseTable() + " etl completed in: "
+                        + (System.currentTimeMillis() - start) / 1000 + "s!");
+
+            etlResult.setResultMessage("导入HBase表 " + hbaseMapping.getHbaseTable() + " 数据:" + successCount.get() + " 条");
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            errMsg.add(hbaseTable + " etl failed! ==>" + e.getMessage());
+        }
+
+        if (errMsg.isEmpty()) {
+            etlResult.setSucceeded(true);
+        } else {
+            etlResult.setErrorMessage(Joiner.on("\n").join(errMsg));
+        }
+        return etlResult;
+    }
+
+    /**
+     * 执行导入
+     * 
+     * @param ds
+     * @param sql
+     * @param hbaseMapping
+     * @param hbaseTemplate
+     * @param successCount
+     * @param errMsg
+     * @return
+     */
+    private static boolean executeSqlImport(DataSource ds, String sql, MappingConfig.HbaseMapping hbaseMapping,
+                                            HbaseTemplate hbaseTemplate, AtomicLong successCount, List<String> errMsg) {
+        try {
+            Util.sqlRS(ds, sql, rs -> {
+                int i = 1;
+
+                try {
+                    boolean complete = false;
+                    List<HRow> rows = new ArrayList<>();
+                    String[] rowKeyColumns = null;
+                    if (hbaseMapping.getRowKey() != null) {
+                        rowKeyColumns = hbaseMapping.getRowKey().trim().split(",");
+                    }
+                    while (rs.next()) {
+                        int cc = rs.getMetaData().getColumnCount();
+                        int[] jdbcTypes = new int[cc];
+                        Class<?>[] classes = new Class[cc];
+                        for (int j = 1; j <= cc; j++) {
+                            int jdbcType = rs.getMetaData().getColumnType(j);
+                            jdbcTypes[j - 1] = jdbcType;
+                            classes[j - 1] = JdbcTypeUtil.jdbcType2javaType(jdbcType);
+                        }
+                        HRow row = new HRow();
+
+                        if (rowKeyColumns != null) {
+                            // 取rowKey字段拼接
+                            StringBuilder rowKeyVale = new StringBuilder();
+                            for (String rowKeyColumnName : rowKeyColumns) {
+                                Object obj = rs.getObject(rowKeyColumnName);
+                                if (obj != null) {
+                                    rowKeyVale.append(obj.toString());
+                                }
+                                rowKeyVale.append("|");
+                            }
+                            int len = rowKeyVale.length();
+                            if (len > 0) {
+                                rowKeyVale.delete(len - 1, len);
+                            }
+                            row.setRowKey(Bytes.toBytes(rowKeyVale.toString()));
+                        }
+
+                        for (int j = 1; j <= cc; j++) {
+                            String columnName = rs.getMetaData().getColumnName(j);
+
+                            Object val = JdbcTypeUtil.getRSData(rs, columnName, jdbcTypes[j - 1]);
+                            if (val == null) {
+                                continue;
+                            }
+
+                            MappingConfig.ColumnItem columnItem = hbaseMapping.getColumnItems().get(columnName);
+                            // 没有配置映射
+                            if (columnItem == null) {
+                                String family = hbaseMapping.getFamily();
+                                String qualifile = columnName;
+                                if (hbaseMapping.isUppercaseQualifier()) {
+                                    qualifile = qualifile.toUpperCase();
+                                }
+                                if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
+                                    if (hbaseMapping.getRowKey() == null && j == 1) {
+                                        row.setRowKey(Bytes.toBytes(val.toString()));
+                                    } else {
+                                        row.addCell(family, qualifile, Bytes.toBytes(val.toString()));
+                                    }
+                                } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
+                                    Type type = Type.getType(classes[j - 1]);
+                                    if (hbaseMapping.getRowKey() == null && j == 1) {
+                                        row.setRowKey(TypeUtil.toBytes(val, type));
+                                    } else {
+                                        row.addCell(family, qualifile, TypeUtil.toBytes(val, type));
+                                    }
+                                } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
+                                    PhType phType = PhType.getType(classes[j - 1]);
+                                    if (hbaseMapping.getRowKey() == null && j == 1) {
+                                        row.setRowKey(PhTypeUtil.toBytes(val, phType));
+                                    } else {
+                                        row.addCell(family, qualifile, PhTypeUtil.toBytes(val, phType));
+                                    }
+                                }
+                            } else {
+                                // 如果不需要类型转换
+                                if (columnItem.getType() == null || "".equals(columnItem.getType())) {
+                                    if (val instanceof java.sql.Date) {
+                                        SimpleDateFormat dateFmt = new SimpleDateFormat("yyyy-MM-dd");
+                                        val = dateFmt.format((Date) val);
+                                    } else if (val instanceof Timestamp) {
+                                        SimpleDateFormat datetimeFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+                                        val = datetimeFmt.format((Date) val);
+                                    }
+
+                                    byte[] valBytes = Bytes.toBytes(val.toString());
+                                    if (columnItem.isRowKey()) {
+                                        if (columnItem.getRowKeyLen() != null) {
+                                            valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
+                                            row.setRowKey(valBytes);
+                                        } else {
+                                            row.setRowKey(valBytes);
+                                        }
+                                    } else {
+                                        row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
+                                    }
+                                } else {
+                                    if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
+                                        byte[] valBytes = Bytes.toBytes(val.toString());
+                                        if (columnItem.isRowKey()) {
+                                            if (columnItem.getRowKeyLen() != null) {
+                                                valBytes = Bytes.toBytes(limitLenNum(columnItem.getRowKeyLen(), val));
+                                            }
+                                            row.setRowKey(valBytes);
+                                        } else {
+                                            row.addCell(columnItem.getFamily(), columnItem.getQualifier(), valBytes);
+                                        }
+                                    } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
+                                        Type type = Type.getType(columnItem.getType());
+                                        if (columnItem.isRowKey()) {
+                                            if (columnItem.getRowKeyLen() != null) {
+                                                String v = limitLenNum(columnItem.getRowKeyLen(), val);
+                                                row.setRowKey(Bytes.toBytes(v));
+                                            } else {
+                                                row.setRowKey(TypeUtil.toBytes(val, type));
+                                            }
+                                        } else {
+                                            row.addCell(columnItem.getFamily(),
+                                                columnItem.getQualifier(),
+                                                TypeUtil.toBytes(val, type));
+                                        }
+                                    } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
+                                        PhType phType = PhType.getType(columnItem.getType());
+                                        if (columnItem.isRowKey()) {
+                                            row.setRowKey(PhTypeUtil.toBytes(val, phType));
+                                        } else {
+                                            row.addCell(columnItem.getFamily(),
+                                                columnItem.getQualifier(),
+                                                PhTypeUtil.toBytes(val, phType));
+                                        }
+                                    }
+                                }
+                            }
+                        }
+
+                        if (row.getRowKey() == null) throw new RuntimeException("RowKey 值为空");
+
+                        rows.add(row);
+                        complete = false;
+                        if (i % hbaseMapping.getCommitBatch() == 0 && !rows.isEmpty()) {
+                            hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
+                            rows.clear();
+                            complete = true;
+                        }
+                        i++;
+                        successCount.incrementAndGet();
+                        if (logger.isDebugEnabled()) {
+                            logger.debug("successful import count:" + successCount.get());
+                        }
+                    }
+
+                    if (!complete && !rows.isEmpty()) {
+                        hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
+                    }
+
+                } catch (Exception e) {
+                    logger.error(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage(), e);
+                    errMsg.add(hbaseMapping.getHbaseTable() + " etl failed! ==>" + e.getMessage());
+                    // throw new RuntimeException(e);
+                }
+                return i;
+            });
+            return true;
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            return false;
+        }
+    }
+
+    private static String limitLenNum(int len, Object val) {
+        if (val == null) {
+            return null;
+        }
+        if (val instanceof Number) {
+            return String.format("%0" + len + "d", (Number) ((Number) val).longValue());
+        } else if (val instanceof String) {
+            return String.format("%0" + len + "d", Long.parseLong((String) val));
+        }
+        return null;
+    }
+}

+ 459 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseSyncService.java

@@ -0,0 +1,459 @@
+package com.alibaba.otter.canal.client.adapter.hbase.service;
+
+import java.util.*;
+
+import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.serializer.SerializerFeature;
+import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.hbase.support.*;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+
+/**
+ * HBase同步操作业务
+ *
+ * @author rewerma 2018-8-21 下午06:45:49
+ * @version 1.0.0
+ */
+public class HbaseSyncService {
+
+    private Logger        logger = LoggerFactory.getLogger(this.getClass());
+
+    private HbaseTemplate hbaseTemplate;                                    // HBase操作模板
+
+    public HbaseSyncService(HbaseTemplate hbaseTemplate){
+        this.hbaseTemplate = hbaseTemplate;
+    }
+
+    public void sync(MappingConfig config, Dml dml) {
+        try {
+            if (config != null) {
+                String type = dml.getType();
+                if (type != null && type.equalsIgnoreCase("INSERT")) {
+                    insert(config, dml);
+                } else if (type != null && type.equalsIgnoreCase("UPDATE")) {
+                    update(config, dml);
+                } else if (type != null && type.equalsIgnoreCase("DELETE")) {
+                    delete(config, dml);
+                }
+                if (logger.isDebugEnabled()) {
+                    logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue));
+                }
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * 插入操作
+     * 
+     * @param config 配置项
+     * @param dml DML数据
+     */
+    private void insert(MappingConfig config, Dml dml) {
+        List<Map<String, Object>> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
+
+        // if (!validHTable(config)) {
+        // logger.error("HBase table '{}' not exists",
+        // hbaseMapping.getHbaseTable());
+        // return;
+        // }
+        int i = 1;
+        boolean complete = false;
+        List<HRow> rows = new ArrayList<>();
+        for (Map<String, Object> r : data) {
+            HRow hRow = new HRow();
+
+            // 拼接复合rowKey
+            if (hbaseMapping.getRowKey() != null) {
+                String[] rowKeyColumns = hbaseMapping.getRowKey().trim().split(",");
+                String rowKeyVale = getRowKeys(rowKeyColumns, r);
+                // params.put("rowKey", Bytes.toBytes(rowKeyVale));
+                hRow.setRowKey(Bytes.toBytes(rowKeyVale));
+            }
+
+            convertData2Row(hbaseMapping, hRow, r);
+            if (hRow.getRowKey() == null) {
+                throw new RuntimeException("empty rowKey");
+            }
+            rows.add(hRow);
+            complete = false;
+            if (i % config.getHbaseMapping().getCommitBatch() == 0 && !rows.isEmpty()) {
+                hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
+                rows.clear();
+                complete = true;
+            }
+            i++;
+        }
+        if (!complete && !rows.isEmpty()) {
+            hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
+        }
+
+    }
+
+    /**
+     * 将Map数据转换为HRow行数据
+     * 
+     * @param hbaseMapping hbase映射配置
+     * @param hRow 行对象
+     * @param data Map数据
+     */
+    private static void convertData2Row(MappingConfig.HbaseMapping hbaseMapping, HRow hRow, Map<String, Object> data) {
+        Map<String, MappingConfig.ColumnItem> columnItems = hbaseMapping.getColumnItems();
+        int i = 0;
+        for (Map.Entry<String, Object> entry : data.entrySet()) {
+            if (hbaseMapping.getExcludeColumns() != null && hbaseMapping.getExcludeColumns().contains(entry.getKey())) {
+                continue;
+            }
+            if (entry.getValue() != null) {
+                MappingConfig.ColumnItem columnItem = columnItems.get(entry.getKey());
+
+                byte[] bytes = typeConvert(columnItem, hbaseMapping, entry.getValue());
+
+                if (columnItem == null) {
+                    String familyName = hbaseMapping.getFamily();
+                    String qualifier = entry.getKey();
+                    if (hbaseMapping.isUppercaseQualifier()) {
+                        qualifier = qualifier.toUpperCase();
+                    }
+
+                    if (hbaseMapping.getRowKey() == null && i == 0) {
+                        hRow.setRowKey(bytes);
+                    } else {
+                        hRow.addCell(familyName, qualifier, bytes);
+                    }
+                } else {
+                    if (columnItem.isRowKey()) {
+                        if (columnItem.getRowKeyLen() != null && entry.getValue() != null) {
+                            if (entry.getValue() instanceof Number) {
+                                String v = String.format("%0" + columnItem.getRowKeyLen() + "d",
+                                    ((Number) entry.getValue()).longValue());
+                                bytes = Bytes.toBytes(v);
+                            } else {
+                                try {
+                                    String v = String.format("%0" + columnItem.getRowKeyLen() + "d",
+                                        Integer.parseInt((String) entry.getValue()));
+                                    bytes = Bytes.toBytes(v);
+                                } catch (Exception e) {
+                                    // ignore
+                                }
+                            }
+                        }
+                        hRow.setRowKey(bytes);
+                    } else {
+                        hRow.addCell(columnItem.getFamily(), columnItem.getQualifier(), bytes);
+                    }
+                }
+            }
+            i++;
+        }
+    }
+
+    /**
+     * 更新操作
+     * 
+     * @param config 配置对象
+     * @param dml dml对象
+     */
+    private void update(MappingConfig config, Dml dml) {
+        List<Map<String, Object>> data = dml.getData();
+        List<Map<String, Object>> old = dml.getOld();
+        if (old == null || old.isEmpty() || data == null || data.isEmpty()) {
+            return;
+        }
+
+        MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
+
+        // if (!validHTable(config)) {
+        // logger.error("HBase table '{}' not exists",
+        // hbaseMapping.getHbaseTable());
+        // return;
+        // }
+
+        MappingConfig.ColumnItem rowKeyColumn = hbaseMapping.getRowKeyColumn();
+        int index = 0;
+        int i = 1;
+        boolean complete = false;
+        List<HRow> rows = new ArrayList<>();
+        out: for (Map<String, Object> r : data) {
+            byte[] rowKeyBytes;
+
+            if (hbaseMapping.getRowKey() != null) {
+                String[] rowKeyColumns = hbaseMapping.getRowKey().trim().split(",");
+
+                // 判断是否有复合主键修改
+                for (String updateColumn : old.get(index).keySet()) {
+                    for (String rowKeyColumnName : rowKeyColumns) {
+                        if (rowKeyColumnName.equalsIgnoreCase(updateColumn)) {
+                            // 调用删除插入操作
+                            deleteAndInsert(config, dml);
+                            continue out;
+                        }
+                    }
+                }
+
+                String rowKeyVale = getRowKeys(rowKeyColumns, r);
+                rowKeyBytes = Bytes.toBytes(rowKeyVale);
+            } else if (rowKeyColumn == null) {
+                Map<String, Object> rowKey = data.get(0);
+                rowKeyBytes = typeConvert(null, hbaseMapping, rowKey.values().iterator().next());
+            } else {
+                rowKeyBytes = getRowKeyBytes(hbaseMapping, rowKeyColumn, r);
+            }
+            if (rowKeyBytes == null) throw new RuntimeException("rowKey值为空");
+
+            Map<String, MappingConfig.ColumnItem> columnItems = hbaseMapping.getColumnItems();
+            HRow hRow = new HRow(rowKeyBytes);
+            for (String updateColumn : old.get(index).keySet()) {
+                if (hbaseMapping.getExcludeColumns() != null
+                    && hbaseMapping.getExcludeColumns().contains(updateColumn)) {
+                    continue;
+                }
+                MappingConfig.ColumnItem columnItem = columnItems.get(updateColumn);
+                if (columnItem == null) {
+                    String family = hbaseMapping.getFamily();
+                    String qualifier = updateColumn;
+                    if (hbaseMapping.isUppercaseQualifier()) {
+                        qualifier = qualifier.toUpperCase();
+                    }
+
+                    Object newVal = r.get(updateColumn);
+
+                    if (newVal == null) {
+                        hRow.addCell(family, qualifier, null);
+                    } else {
+                        hRow.addCell(family, qualifier, typeConvert(null, hbaseMapping, newVal));
+                    }
+                } else {
+                    // 排除修改id的情况
+                    if (columnItem.isRowKey()) continue;
+
+                    Object newVal = r.get(updateColumn);
+                    if (newVal == null) {
+                        hRow.addCell(columnItem.getFamily(), columnItem.getQualifier(), null);
+                    } else {
+                        hRow.addCell(columnItem.getFamily(),
+                            columnItem.getQualifier(),
+                            typeConvert(columnItem, hbaseMapping, newVal));
+                    }
+                }
+            }
+            rows.add(hRow);
+            complete = false;
+            if (i % config.getHbaseMapping().getCommitBatch() == 0 && !rows.isEmpty()) {
+                hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
+                rows.clear();
+                complete = true;
+            }
+            i++;
+            index++;
+        }
+        if (!complete && !rows.isEmpty()) {
+            hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
+        }
+    }
+
+    private void delete(MappingConfig config, Dml dml) {
+        List<Map<String, Object>> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
+
+        // if (!validHTable(config)) {
+        // logger.error("HBase table '{}' not exists",
+        // hbaseMapping.getHbaseTable());
+        // return;
+        // }
+
+        MappingConfig.ColumnItem rowKeyColumn = hbaseMapping.getRowKeyColumn();
+        boolean complete = false;
+        int i = 1;
+        Set<byte[]> rowKeys = new HashSet<>();
+        for (Map<String, Object> r : data) {
+            byte[] rowKeyBytes;
+
+            if (hbaseMapping.getRowKey() != null) {
+                String[] rowKeyColumns = hbaseMapping.getRowKey().trim().split(",");
+                String rowKeyVale = getRowKeys(rowKeyColumns, r);
+                rowKeyBytes = Bytes.toBytes(rowKeyVale);
+            } else if (rowKeyColumn == null) {
+                // 如果不需要类型转换
+                Map<String, Object> rowKey = data.get(0);
+                rowKeyBytes = typeConvert(null, hbaseMapping, rowKey.values().iterator().next());
+            } else {
+                rowKeyBytes = getRowKeyBytes(hbaseMapping, rowKeyColumn, r);
+            }
+            if (rowKeyBytes == null) throw new RuntimeException("rowKey值为空");
+            rowKeys.add(rowKeyBytes);
+            complete = false;
+            if (i % config.getHbaseMapping().getCommitBatch() == 0 && !rowKeys.isEmpty()) {
+                hbaseTemplate.deletes(hbaseMapping.getHbaseTable(), rowKeys);
+                rowKeys.clear();
+                complete = true;
+            }
+            i++;
+        }
+        if (!complete && !rowKeys.isEmpty()) {
+            hbaseTemplate.deletes(hbaseMapping.getHbaseTable(), rowKeys);
+        }
+    }
+
+    private void deleteAndInsert(MappingConfig config, Dml dml) {
+        List<Map<String, Object>> data = dml.getData();
+        List<Map<String, Object>> old = dml.getOld();
+        if (old == null || old.isEmpty() || data == null || data.isEmpty()) {
+            return;
+        }
+        MappingConfig.HbaseMapping hbaseMapping = config.getHbaseMapping();
+
+        String[] rowKeyColumns = hbaseMapping.getRowKey().trim().split(",");
+
+        int index = 0;
+        int i = 1;
+        boolean complete = false;
+        Set<byte[]> rowKeys = new HashSet<>();
+        List<HRow> rows = new ArrayList<>();
+        for (Map<String, Object> r : data) {
+            // 拼接老的rowKey
+            List<String> updateSubRowKey = new ArrayList<>();
+            for (String rowKeyColumnName : rowKeyColumns) {
+                for (String updateColumn : old.get(index).keySet()) {
+                    if (rowKeyColumnName.equalsIgnoreCase(updateColumn)) {
+                        updateSubRowKey.add(rowKeyColumnName);
+                    }
+                }
+            }
+            if (updateSubRowKey.isEmpty()) {
+                throw new RuntimeException("没有更新复合主键的RowKey");
+            }
+            StringBuilder oldRowKey = new StringBuilder();
+            StringBuilder newRowKey = new StringBuilder();
+            for (String rowKeyColumnName : rowKeyColumns) {
+                newRowKey.append(r.get(rowKeyColumnName).toString()).append("|");
+                if (!updateSubRowKey.contains(rowKeyColumnName)) {
+                    // 从data取
+                    oldRowKey.append(r.get(rowKeyColumnName).toString()).append("|");
+                } else {
+                    // 从old取
+                    oldRowKey.append(old.get(index).get(rowKeyColumnName).toString()).append("|");
+                }
+            }
+            int len = newRowKey.length();
+            newRowKey.delete(len - 1, len);
+            len = oldRowKey.length();
+            oldRowKey.delete(len - 1, len);
+            byte[] newRowKeyBytes = Bytes.toBytes(newRowKey.toString());
+            byte[] oldRowKeyBytes = Bytes.toBytes(oldRowKey.toString());
+
+            rowKeys.add(oldRowKeyBytes);
+            HRow row = new HRow(newRowKeyBytes);
+            convertData2Row(hbaseMapping, row, r);
+            rows.add(row);
+            complete = false;
+            if (i % config.getHbaseMapping().getCommitBatch() == 0 && !rows.isEmpty()) {
+                hbaseTemplate.deletes(hbaseMapping.getHbaseTable(), rowKeys);
+
+                hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
+                rowKeys.clear();
+                rows.clear();
+                complete = true;
+            }
+            i++;
+            index++;
+        }
+        if (!complete && !rows.isEmpty()) {
+            hbaseTemplate.deletes(hbaseMapping.getHbaseTable(), rowKeys);
+            hbaseTemplate.puts(hbaseMapping.getHbaseTable(), rows);
+        }
+    }
+
+    /**
+     * 根据对应的类型进行转换
+     * 
+     * @param columnItem 列项配置
+     * @param hbaseMapping hbase映射配置
+     * @param value 值
+     * @return 复合字段rowKey
+     */
+    private static byte[] typeConvert(MappingConfig.ColumnItem columnItem, MappingConfig.HbaseMapping hbaseMapping,
+                                      Object value) {
+        if (value == null) {
+            return null;
+        }
+        byte[] bytes = null;
+        if (columnItem == null || columnItem.getType() == null || "".equals(columnItem.getType())) {
+            if (MappingConfig.Mode.STRING == hbaseMapping.getMode()) {
+                bytes = Bytes.toBytes(value.toString());
+            } else if (MappingConfig.Mode.NATIVE == hbaseMapping.getMode()) {
+                bytes = TypeUtil.toBytes(value);
+            } else if (MappingConfig.Mode.PHOENIX == hbaseMapping.getMode()) {
+                PhType phType = PhType.getType(value.getClass());
+                bytes = PhTypeUtil.toBytes(value, phType);
+            }
+        } else {
+            if (hbaseMapping.getMode() == MappingConfig.Mode.STRING) {
+                bytes = Bytes.toBytes(value.toString());
+            } else if (hbaseMapping.getMode() == MappingConfig.Mode.NATIVE) {
+                Type type = Type.getType(columnItem.getType());
+                bytes = TypeUtil.toBytes(value, type);
+            } else if (hbaseMapping.getMode() == MappingConfig.Mode.PHOENIX) {
+                PhType phType = PhType.getType(columnItem.getType());
+                bytes = PhTypeUtil.toBytes(value, phType);
+            }
+        }
+        return bytes;
+    }
+
+    /**
+     * 获取复合字段作为rowKey的拼接
+     *
+     * @param rowKeyColumns 复合rowK对应的字段
+     * @param data 数据
+     * @return
+     */
+    private static String getRowKeys(String[] rowKeyColumns, Map<String, Object> data) {
+        StringBuilder rowKeyValue = new StringBuilder();
+        for (String rowKeyColumnName : rowKeyColumns) {
+            Object obj = data.get(rowKeyColumnName);
+            if (obj != null) {
+                rowKeyValue.append(obj.toString());
+            }
+            rowKeyValue.append("|");
+        }
+        int len = rowKeyValue.length();
+        if (len > 0) {
+            rowKeyValue.delete(len - 1, len);
+        }
+        return rowKeyValue.toString();
+    }
+
+    private static byte[] getRowKeyBytes(MappingConfig.HbaseMapping hbaseMapping, MappingConfig.ColumnItem rowKeyColumn,
+                                         Map<String, Object> rowData) {
+        Object val = rowData.get(rowKeyColumn.getColumn());
+        String v = null;
+        if (rowKeyColumn.getRowKeyLen() != null) {
+            if (val instanceof Number) {
+                v = String.format("%0" + rowKeyColumn.getRowKeyLen() + "d", (Number) ((Number) val).longValue());
+            } else if (val instanceof String) {
+                v = String.format("%0" + rowKeyColumn.getRowKeyLen() + "d", Long.parseLong((String) val));
+            }
+        }
+        if (v != null) {
+            return Bytes.toBytes(v);
+        } else {
+            return typeConvert(rowKeyColumn, hbaseMapping, val);
+        }
+    }
+
+}

+ 84 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/HRow.java

@@ -0,0 +1,84 @@
+package com.alibaba.otter.canal.client.adapter.hbase.support;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * HBase操作对象类
+ *
+ * @author machengyuan 2018-8-21 下午10:12:34
+ * @version 1.0.0
+ */
/**
 * A single HBase row to be written: a rowKey plus an ordered list of cells.
 * Plain mutable holder with no HBase dependencies.
 *
 * @author machengyuan 2018-8-21
 * @version 1.0.0
 */
public class HRow {

    private byte[]      rowKey;
    private List<HCell> cells = new ArrayList<>();

    /** Creates a row with no rowKey assigned yet. */
    public HRow(){
    }

    /**
     * Creates a row with the given rowKey.
     *
     * @param rowKey encoded rowKey bytes
     */
    public HRow(byte[] rowKey){
        this.rowKey = rowKey;
    }

    public byte[] getRowKey() {
        return rowKey;
    }

    public void setRowKey(byte[] rowKey) {
        this.rowKey = rowKey;
    }

    public List<HCell> getCells() {
        return cells;
    }

    public void setCells(List<HCell> cells) {
        this.cells = cells;
    }

    /**
     * Appends one cell (family, qualifier, value) to this row.
     *
     * @param family column family name
     * @param qualifier column qualifier
     * @param value encoded cell value (may be null)
     */
    public void addCell(String family, String qualifier, byte[] value) {
        cells.add(new HCell(family, qualifier, value));
    }

    /** One cell of a row: family + qualifier + encoded value. */
    public static class HCell {

        private String family;
        private String qualifier;
        private byte[] value;

        public HCell(){
        }

        public HCell(String family, String qualifier, byte[] value){
            this.family = family;
            this.qualifier = qualifier;
            this.value = value;
        }

        public String getFamily() {
            return family;
        }

        public void setFamily(String family) {
            this.family = family;
        }

        public String getQualifier() {
            return qualifier;
        }

        public void setQualifier(String qualifier) {
            this.qualifier = qualifier;
        }

        public byte[] getValue() {
            return value;
        }

        public void setValue(byte[] value) {
            this.value = value;
        }
    }
}

+ 163 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/HbaseTemplate.java

@@ -0,0 +1,163 @@
+package com.alibaba.otter.canal.client.adapter.hbase.support;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * HBase操作模板
+ *
+ * @author machengyuan 2018-8-21 下午10:12:34
+ * @version 1.0.0
+ */
+public class HbaseTemplate {
+
+    private Logger     logger = LoggerFactory.getLogger(this.getClass());
+
+    private Connection conn;
+
+    public HbaseTemplate(Connection conn){
+        this.conn = conn;
+    }
+
+    public boolean tableExists(String tableName) {
+        try (HBaseAdmin admin = (HBaseAdmin) conn.getAdmin()) {
+
+            return admin.tableExists(TableName.valueOf(tableName));
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void createTable(String tableName, String... familyNames) {
+        try (HBaseAdmin admin = (HBaseAdmin) conn.getAdmin()) {
+
+            HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
+            // 添加列簇
+            if (familyNames != null) {
+                for (String familyName : familyNames) {
+                    HColumnDescriptor hcd = new HColumnDescriptor(familyName);
+                    desc.addFamily(hcd);
+                }
+            }
+            admin.createTable(desc);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void disableTable(String tableName) {
+        try (HBaseAdmin admin = (HBaseAdmin) conn.getAdmin()) {
+            admin.disableTable(tableName);
+        } catch (IOException e) {
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void deleteTable(String tableName) {
+        try (HBaseAdmin admin = (HBaseAdmin) conn.getAdmin()) {
+            if (admin.isTableEnabled(tableName)) {
+                disableTable(tableName);
+            }
+            admin.deleteTable(tableName);
+        } catch (IOException e) {
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * 插入一行数据
+     * 
+     * @param tableName 表名
+     * @param hRow 行数据对象
+     * @return 是否成功
+     */
+    public Boolean put(String tableName, HRow hRow) {
+        boolean flag = false;
+        try {
+            HTable table = (HTable) conn.getTable(TableName.valueOf(tableName));
+            Put put = new Put(hRow.getRowKey());
+            for (HRow.HCell hCell : hRow.getCells()) {
+                put.addColumn(Bytes.toBytes(hCell.getFamily()), Bytes.toBytes(hCell.getQualifier()), hCell.getValue());
+            }
+            table.put(put);
+            flag = true;
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+        return flag;
+
+    }
+
+    /**
+     * 批量插入
+     * 
+     * @param tableName 表名
+     * @param rows 行数据对象集合
+     * @return 是否成功
+     */
+    public Boolean puts(String tableName, List<HRow> rows) {
+        boolean flag = false;
+        try {
+            HTable table = (HTable) conn.getTable(TableName.valueOf(tableName));
+            List<Put> puts = new ArrayList<>();
+            for (HRow hRow : rows) {
+                Put put = new Put(hRow.getRowKey());
+                for (HRow.HCell hCell : hRow.getCells()) {
+                    put.addColumn(Bytes.toBytes(hCell.getFamily()),
+                        Bytes.toBytes(hCell.getQualifier()),
+                        hCell.getValue());
+                }
+                puts.add(put);
+            }
+            if (!puts.isEmpty()) {
+                table.put(puts);
+            }
+            flag = true;
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+        return flag;
+    }
+
+    /**
+     * 批量删除数据
+     * 
+     * @param tableName 表名
+     * @param rowKeys rowKey集合
+     * @return 是否成功
+     */
+    public Boolean deletes(String tableName, Set<byte[]> rowKeys) {
+        boolean flag = false;
+        try {
+            HTable table = (HTable) conn.getTable(TableName.valueOf(tableName));
+            List<Delete> deletes = new ArrayList<>();
+            for (byte[] rowKey : rowKeys) {
+                Delete delete = new Delete(rowKey);
+                deletes.add(delete);
+            }
+            if (!deletes.isEmpty()) {
+                table.delete(deletes);
+            }
+            flag = true;
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+        return flag;
+    }
+}

+ 73 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/PhType.java

@@ -0,0 +1,73 @@
+package com.alibaba.otter.canal.client.adapter.hbase.support;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.Date;
+
+/**
+ * Phoenix类型
+ *
+ * @author machengyuan 2018-8-21 下午06:12:34
+ * @version 1.0.0
+ */
+public enum PhType {
+                    DEFAULT, UNSIGNED_INT, UNSIGNED_LONG, UNSIGNED_TINYINT, UNSIGNED_SMALLINT, UNSIGNED_FLOAT,
+                    UNSIGNED_DOUBLE, INTEGER, BIGINT, TINYINT, SMALLINT, FLOAT, DOUBLE, DECIMAL, BOOLEAN, UNSIGNED_TIME,
+                    UNSIGNED_DATE, UNSIGNED_TIMESTAMP, TIME, DATE, TIMESTAMP, VARCHAR, VARBINARY;
+
+    private static Logger logger = LoggerFactory.getLogger(PhType.class);
+
+    public static PhType getType(Class<?> javaType) {
+        if (javaType == null) return DEFAULT;
+        PhType phType;
+        if (Integer.class == javaType || int.class == javaType) {
+            phType = INTEGER;
+        } else if (Long.class == javaType || long.class == javaType) {
+            phType = BIGINT;
+        } else if (Byte.class == javaType || byte.class == javaType) {
+            phType = TINYINT;
+        } else if (Short.class == javaType || short.class == javaType) {
+            phType = SMALLINT;
+        } else if (Float.class == javaType || float.class == javaType) {
+            phType = FLOAT;
+        } else if (Double.class == javaType || double.class == javaType) {
+            phType = DOUBLE;
+        } else if (Boolean.class == javaType || boolean.class == javaType) {
+            phType = BOOLEAN;
+        } else if (java.sql.Date.class == javaType) {
+            phType = DATE;
+        } else if (Time.class == javaType) {
+            phType = DATE;
+        } else if (Timestamp.class == javaType) {
+            phType = TIMESTAMP;
+        } else if (Date.class == javaType) {
+            phType = DATE;
+        } else if (byte[].class == javaType) {
+            phType = VARBINARY;
+        } else if (String.class == javaType) {
+            phType = VARCHAR;
+        } else if (BigDecimal.class == javaType) {
+            phType = DECIMAL;
+        } else if (BigInteger.class == javaType) {
+            phType = UNSIGNED_LONG;
+        } else {
+            phType = DEFAULT;
+        }
+        return phType;
+    }
+
+    public static PhType getType(String type) {
+        if (type == null) return DEFAULT;
+        try {
+            return PhType.valueOf(type.toUpperCase());
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            return DEFAULT;
+        }
+    }
+}

+ 620 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/PhTypeUtil.java

@@ -0,0 +1,620 @@
+package com.alibaba.otter.canal.client.adapter.hbase.support;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.math.MathContext;
+import java.math.RoundingMode;
+import java.sql.Timestamp;
+import java.util.Date;
+
+import org.apache.hadoop.hbase.util.Bytes;
+import org.joda.time.DateTime;
+
+import com.google.common.math.LongMath;
+
+/**
+ * Phoenix类型转换工具类
+ *
+ * @author machengyuan 2018-8-21 下午06:14:26
+ * @version 1.0.0
+ */
+public class PhTypeUtil {
+
+    public static byte[] toBytes(Object v, PhType phType) {
+        if (v == null) return null;
+        byte[] b = null;
+        if (phType == PhType.DEFAULT) {
+            PhType phType1 = PhType.getType(v.getClass());
+            if (phType1 != null && phType1 != PhType.DEFAULT) {
+                toBytes(v, phType1);
+            }
+        } else if (phType == PhType.INTEGER) {
+            b = new byte[Bytes.SIZEOF_INT];
+            encodeInt(((Number) v).intValue(), b, 0);
+        } else if (phType == PhType.UNSIGNED_INT) {
+            b = new byte[Bytes.SIZEOF_INT];
+            encodeUnsignedInt(((Number) v).intValue(), b, 0);
+        } else if (phType == PhType.BIGINT) {
+            b = new byte[Bytes.SIZEOF_LONG];
+            encodeLong(((Number) v).longValue(), b, 0);
+        } else if (phType == PhType.UNSIGNED_LONG) {
+            b = new byte[Bytes.SIZEOF_LONG];
+            encodeUnsignedLong(((Number) v).longValue(), b, 0);
+        } else if (phType == PhType.SMALLINT) {
+            b = new byte[Bytes.SIZEOF_SHORT];
+            encodeShort(((Number) v).shortValue(), b, 0);
+        } else if (phType == PhType.UNSIGNED_SMALLINT) {
+            b = new byte[Bytes.SIZEOF_SHORT];
+            encodeUnsignedShort(((Number) v).shortValue(), b, 0);
+        } else if (phType == PhType.TINYINT) {
+            b = new byte[Bytes.SIZEOF_BYTE];
+            encodeByte(((Number) v).byteValue(), b, 0);
+        } else if (phType == PhType.UNSIGNED_TINYINT) {
+            b = new byte[Bytes.SIZEOF_BYTE];
+            encodeUnsignedByte(((Number) v).byteValue(), b, 0);
+        } else if (phType == PhType.FLOAT) {
+            b = new byte[Bytes.SIZEOF_FLOAT];
+            encodeFloat(((Number) v).floatValue(), b, 0);
+        } else if (phType == PhType.UNSIGNED_FLOAT) {
+            b = new byte[Bytes.SIZEOF_FLOAT];
+            encodeUnsignedFloat(((Number) v).floatValue(), b, 0);
+        } else if (phType == PhType.DOUBLE) {
+            b = new byte[Bytes.SIZEOF_DOUBLE];
+            encodeDouble(((Number) v).doubleValue(), b, 0);
+        } else if (phType == PhType.UNSIGNED_DOUBLE) {
+            b = new byte[Bytes.SIZEOF_DOUBLE];
+            encodeUnsignedDouble(((Number) v).doubleValue(), b, 0);
+        } else if (phType == PhType.BOOLEAN) {
+            if ((Boolean) v) {
+                b = new byte[] { 1 };
+            } else {
+                b = new byte[] { 0 };
+            }
+        } else if (phType == PhType.TIME || phType == PhType.DATE) {
+            b = new byte[Bytes.SIZEOF_LONG];
+            encodeDate(v, b, 0);
+        } else if (phType == PhType.TIMESTAMP) {
+            b = new byte[Bytes.SIZEOF_LONG + Bytes.SIZEOF_INT];
+            encodeTimestamp(v, b, 0);
+        } else if (phType == PhType.UNSIGNED_TIME || phType == PhType.UNSIGNED_DATE) {
+            b = new byte[Bytes.SIZEOF_LONG];
+            encodeUnsignedDate(v, b, 0);
+        } else if (phType == PhType.UNSIGNED_TIMESTAMP) {
+            b = new byte[Bytes.SIZEOF_LONG + Bytes.SIZEOF_INT];
+            encodeUnsignedTimestamp(v, b, 0);
+        } else if (phType == PhType.VARBINARY) {
+            b = (byte[]) v;
+        } else if (phType == PhType.VARCHAR) {
+            b = Bytes.toBytes(v.toString());
+        } else if (phType == PhType.DECIMAL) {
+            if (v instanceof BigDecimal) {
+                b = encodeDecimal(v);
+            } else if (v instanceof Number) {
+                b = encodeDecimal(new BigDecimal(v.toString()));
+            }
+        }
+        return b;
+    }
+
+    public static Object toObject(byte[] b, PhType phType) {
+        if (b == null) return null;
+        Object v = null;
+        if (phType == PhType.INTEGER) {
+            v = decodeInt(b, 0);
+        } else if (phType == PhType.UNSIGNED_INT) {
+            v = decodeUnsignedInt(b, 0);
+        } else if (phType == PhType.BIGINT) {
+            v = decodeLong(b, 0);
+        } else if (phType == PhType.UNSIGNED_LONG) {
+            v = decodeUnsignedLong(b, 0);
+        } else if (phType == PhType.SMALLINT) {
+            v = decodeShort(b, 0);
+        } else if (phType == PhType.UNSIGNED_SMALLINT) {
+            v = decodeUnsignedShort(b, 0);
+        } else if (phType == PhType.TINYINT) {
+            v = decodeByte(b, 0);
+        } else if (phType == PhType.UNSIGNED_TINYINT) {
+            v = decodeUnsignedByte(b, 0);
+        } else if (phType == PhType.FLOAT) {
+            v = decodeFloat(b, 0);
+        } else if (phType == PhType.UNSIGNED_FLOAT) {
+            v = decodeUnsignedFloat(b, 0);
+        } else if (phType == PhType.DOUBLE) {
+            v = decodeDouble(b, 0);
+        } else if (phType == PhType.UNSIGNED_DOUBLE) {
+            v = decodeUnsignedDouble(b, 0);
+        } else if (phType == PhType.BOOLEAN) {
+            checkForSufficientLength(b, 0, Bytes.SIZEOF_BOOLEAN);
+            if (b[0] == 1) {
+                v = true;
+            } else if (b[0] == 0) {
+                v = false;
+            }
+        } else if (phType == PhType.TIME || phType == PhType.DATE) {
+            v = new Date(decodeLong(b, 0));
+        } else if (phType == PhType.TIMESTAMP) {
+            long millisDeserialized = decodeLong(b, 0);
+            Timestamp ts = new Timestamp(millisDeserialized);
+            int nanosDeserialized = decodeUnsignedInt(b, Bytes.SIZEOF_LONG);
+            ts.setNanos(nanosDeserialized < 1000000 ? ts.getNanos() + nanosDeserialized : nanosDeserialized);
+            v = ts;
+        } else if (phType == PhType.UNSIGNED_TIME || phType == PhType.UNSIGNED_DATE) {
+            v = new Date(decodeUnsignedLong(b, 0));
+        } else if (phType == PhType.UNSIGNED_TIMESTAMP) {
+            long millisDeserialized = decodeUnsignedLong(b, 0);
+            Timestamp ts = new Timestamp(millisDeserialized);
+            int nanosDeserialized = decodeUnsignedInt(b, Bytes.SIZEOF_LONG);
+            ts.setNanos(nanosDeserialized < 1000000 ? ts.getNanos() + nanosDeserialized : nanosDeserialized);
+            v = ts;
+        } else if (phType == PhType.VARBINARY) {
+            v = b;
+        } else if (phType == PhType.VARCHAR || phType == PhType.DEFAULT) {
+            v = Bytes.toString(b);
+        } else if (phType == PhType.DECIMAL) {
+            v = decodeDecimal(b, 0, b.length);
+        }
+
+        return v;
+    }
+
+    private static int decodeInt(byte[] bytes, int o) {
+        checkForSufficientLength(bytes, o, Bytes.SIZEOF_INT);
+        int v;
+        v = bytes[o] ^ 0x80; // Flip sign bit back
+        for (int i = 1; i < Bytes.SIZEOF_INT; i++) {
+            v = (v << 8) + (bytes[o + i] & 0xff);
+        }
+        return v;
+    }
+
+    private static int encodeInt(int v, byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_INT);
+        b[o + 0] = (byte) ((v >> 24) ^ 0x80); // Flip sign bit so that INTEGER
+                                              // is binary comparable
+        b[o + 1] = (byte) (v >> 16);
+        b[o + 2] = (byte) (v >> 8);
+        b[o + 3] = (byte) v;
+        return Bytes.SIZEOF_INT;
+    }
+
+    private static int decodeUnsignedInt(byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_INT);
+
+        int v = Bytes.toInt(b, o);
+        if (v < 0) {
+            throw new RuntimeException();
+        }
+        return v;
+    }
+
+    private static int encodeUnsignedInt(int v, byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_INT);
+        if (v < 0) {
+            throw new RuntimeException();
+        }
+        Bytes.putInt(b, o, v);
+        return Bytes.SIZEOF_INT;
+    }
+
+    private static long decodeLong(byte[] bytes, int o) {
+        checkForSufficientLength(bytes, o, Bytes.SIZEOF_LONG);
+        long v;
+        byte b = bytes[o];
+        v = b ^ 0x80; // Flip sign bit back
+        for (int i = 1; i < Bytes.SIZEOF_LONG; i++) {
+            b = bytes[o + i];
+            v = (v << 8) + (b & 0xff);
+        }
+        return v;
+    }
+
+    private static int encodeLong(long v, byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_LONG);
+        b[o + 0] = (byte) ((v >> 56) ^ 0x80); // Flip sign bit so that INTEGER
+                                              // is binary comparable
+        b[o + 1] = (byte) (v >> 48);
+        b[o + 2] = (byte) (v >> 40);
+        b[o + 3] = (byte) (v >> 32);
+        b[o + 4] = (byte) (v >> 24);
+        b[o + 5] = (byte) (v >> 16);
+        b[o + 6] = (byte) (v >> 8);
+        b[o + 7] = (byte) v;
+        return Bytes.SIZEOF_LONG;
+    }
+
+    private static long decodeUnsignedLong(byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_LONG);
+        long v = 0;
+        for (int i = o; i < o + Bytes.SIZEOF_LONG; i++) {
+            v <<= 8;
+            v ^= b[i] & 0xFF;
+        }
+
+        if (v < 0) {
+            throw new RuntimeException();
+        }
+        return v;
+    }
+
+    private static int encodeUnsignedLong(long v, byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_LONG);
+        if (v < 0) {
+            throw new RuntimeException();
+        }
+        Bytes.putLong(b, o, v);
+        return Bytes.SIZEOF_LONG;
+    }
+
+    private static short decodeShort(byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_SHORT);
+        int v;
+        v = b[o] ^ 0x80; // Flip sign bit back
+        for (int i = 1; i < Bytes.SIZEOF_SHORT; i++) {
+            v = (v << 8) + (b[o + i] & 0xff);
+        }
+        return (short) v;
+    }
+
+    private static int encodeShort(short v, byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_SHORT);
+        b[o + 0] = (byte) ((v >> 8) ^ 0x80); // Flip sign bit so that Short is
+                                             // binary comparable
+        b[o + 1] = (byte) v;
+        return Bytes.SIZEOF_SHORT;
+    }
+
+    private static short decodeUnsignedShort(byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_SHORT);
+        short v = Bytes.toShort(b, o);
+        if (v < 0) {
+            throw new RuntimeException();
+        }
+        return v;
+    }
+
+    private static int encodeUnsignedShort(short v, byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_SHORT);
+        if (v < 0) {
+            throw new RuntimeException();
+        }
+        Bytes.putShort(b, o, v);
+        return Bytes.SIZEOF_SHORT;
+    }
+
+    private static byte decodeByte(byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_BYTE);
+        int v;
+        v = b[o] ^ 0x80; // Flip sign bit back
+        return (byte) v;
+    }
+
+    private static int encodeByte(byte v, byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_BYTE);
+        b[o] = (byte) (v ^ 0x80); // Flip sign bit so that Short is binary
+                                  // comparable
+        return Bytes.SIZEOF_BYTE;
+    }
+
+    private static byte decodeUnsignedByte(byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_BYTE);
+        byte v = b[o];
+        if (v < 0) {
+            throw new RuntimeException();
+        }
+        return v;
+    }
+
+    private static int encodeUnsignedByte(byte v, byte[] b, int o) {
+        if (v < 0) {
+            throw new RuntimeException();
+        }
+        Bytes.putByte(b, o, v);
+        return Bytes.SIZEOF_BYTE;
+    }
+
+    private static float decodeFloat(byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_INT);
+        int value;
+        value = Bytes.toInt(b, o);
+        value--;
+        value ^= (~value >> Integer.SIZE - 1) | Integer.MIN_VALUE;
+        return Float.intBitsToFloat(value);
+    }
+
+    private static int encodeFloat(float v, byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_FLOAT);
+        int i = Float.floatToIntBits(v);
+        i = (i ^ ((i >> Integer.SIZE - 1) | Integer.MIN_VALUE)) + 1;
+        Bytes.putInt(b, o, i);
+        return Bytes.SIZEOF_FLOAT;
+    }
+
+    private static float decodeUnsignedFloat(byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_FLOAT);
+        float v = Bytes.toFloat(b, o);
+        if (v < 0) {
+            throw new RuntimeException();
+        }
+        return v;
+    }
+
+    private static int encodeUnsignedFloat(float v, byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_FLOAT);
+        if (v < 0) {
+            throw new RuntimeException();
+        }
+        Bytes.putFloat(b, o, v);
+        return Bytes.SIZEOF_FLOAT;
+    }
+
+    private static double decodeDouble(byte[] bytes, int o) {
+        checkForSufficientLength(bytes, o, Bytes.SIZEOF_LONG);
+        long l;
+        l = Bytes.toLong(bytes, o);
+        l--;
+        l ^= (~l >> Long.SIZE - 1) | Long.MIN_VALUE;
+        return Double.longBitsToDouble(l);
+    }
+
+    private static int encodeDouble(double v, byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_LONG);
+        long l = Double.doubleToLongBits(v);
+        l = (l ^ ((l >> Long.SIZE - 1) | Long.MIN_VALUE)) + 1;
+        Bytes.putLong(b, o, l);
+        return Bytes.SIZEOF_LONG;
+    }
+
+    private static double decodeUnsignedDouble(byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_DOUBLE);
+        double v = Bytes.toDouble(b, o);
+        if (v < 0) {
+            throw new RuntimeException();
+        }
+        return v;
+    }
+
+    private static int encodeUnsignedDouble(double v, byte[] b, int o) {
+        checkForSufficientLength(b, o, Bytes.SIZEOF_DOUBLE);
+        if (v < 0) {
+            throw new RuntimeException();
+        }
+        Bytes.putDouble(b, o, v);
+        return Bytes.SIZEOF_DOUBLE;
+    }
+
+    private static int encodeDate(Object v, byte[] b, int o) {
+        if (v instanceof Date) {
+            encodeLong(((Date) v).getTime(), b, 0);
+        } else if (v instanceof String) {
+            String dateStr = (String) v;
+            Date date;
+            try {
+                date = parseDatetime(dateStr);
+                if (date != null) {
+                    encodeLong(date.getTime(), b, 0);
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        }
+        return Bytes.SIZEOF_LONG;
+    }
+
+    private static int encodeTimestamp(Object v, byte[] b, int o) {
+        if (v instanceof Timestamp) {
+            Timestamp ts = (Timestamp) v;
+            encodeLong(ts.getTime(), b, o);
+            Bytes.putInt(b, Bytes.SIZEOF_LONG, ts.getNanos() % 1000000);
+        } else {
+            encodeDate(v, b, o);
+        }
+        return Bytes.SIZEOF_LONG + Bytes.SIZEOF_INT;
+    }
+
+    private static int encodeUnsignedDate(Object v, byte[] b, int o) {
+        if (v instanceof Date) {
+            encodeUnsignedLong(((Date) v).getTime(), b, 0);
+        } else if (v instanceof String) {
+            String dateStr = (String) v;
+            Date date;
+            try {
+                date = parseDatetime(dateStr);
+                if (date != null) {
+                    encodeUnsignedLong(date.getTime(), b, 0);
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        }
+        return Bytes.SIZEOF_LONG;
+    }
+
+    private static int encodeUnsignedTimestamp(Object v, byte[] b, int o) {
+        if (v instanceof Timestamp) {
+            Timestamp ts = (Timestamp) v;
+            encodeUnsignedLong(ts.getTime(), b, o);
+            Bytes.putInt(b, Bytes.SIZEOF_LONG, ts.getNanos() % 1000000);
+        } else {
+            encodeUnsignedDate(v, b, o);
+        }
+        return Bytes.SIZEOF_LONG + Bytes.SIZEOF_INT;
+    }
+
+    private static byte[] encodeDecimal(Object object) {
+        if (object == null) {
+            return new byte[0];
+        }
+        BigDecimal v = (BigDecimal) object;
+        v = v.round(DEFAULT_MATH_CONTEXT).stripTrailingZeros();
+        int len = getLength(v);
+        byte[] result = new byte[Math.min(len, 21)];
+        decimalToBytes(v, result, 0, len);
+        return result;
+    }
+
+    private static BigDecimal decodeDecimal(byte[] bytes, int offset, int length) {
+        if (length == 1 && bytes[offset] == ZERO_BYTE) {
+            return BigDecimal.ZERO;
+        }
+        int signum = ((bytes[offset] & 0x80) == 0) ? -1 : 1;
+        int scale;
+        int index;
+        int digitOffset;
+        long multiplier = 100L;
+        int begIndex = offset + 1;
+        if (signum == 1) {
+            scale = (byte) (((bytes[offset] & 0x7F) - 65) * -2);
+            index = offset + length;
+            digitOffset = POS_DIGIT_OFFSET;
+        } else {
+            scale = (byte) ((~bytes[offset] - 65 - 128) * -2);
+            index = offset + length - (bytes[offset + length - 1] == NEG_TERMINAL_BYTE ? 1 : 0);
+            digitOffset = -NEG_DIGIT_OFFSET;
+        }
+        length = index - offset;
+        long l = signum * bytes[--index] - digitOffset;
+        if (l % 10 == 0) { // trailing zero
+            scale--; // drop trailing zero and compensate in the scale
+            l /= 10;
+            multiplier = 10;
+        }
+        // Use long arithmetic for as long as we can
+        while (index > begIndex) {
+            if (l >= MAX_LONG_FOR_DESERIALIZE || multiplier >= Long.MAX_VALUE / 100) {
+                multiplier = LongMath.divide(multiplier, 100L, RoundingMode.UNNECESSARY);
+                break; // Exit loop early so we don't overflow our multiplier
+            }
+            int digit100 = signum * bytes[--index] - digitOffset;
+            l += digit100 * multiplier;
+            multiplier = LongMath.checkedMultiply(multiplier, 100);
+        }
+
+        BigInteger bi;
+        // If still more digits, switch to BigInteger arithmetic
+        if (index > begIndex) {
+            bi = BigInteger.valueOf(l);
+            BigInteger biMultiplier = BigInteger.valueOf(multiplier).multiply(ONE_HUNDRED);
+            do {
+                int digit100 = signum * bytes[--index] - digitOffset;
+                bi = bi.add(biMultiplier.multiply(BigInteger.valueOf(digit100)));
+                biMultiplier = biMultiplier.multiply(ONE_HUNDRED);
+            } while (index > begIndex);
+            if (signum == -1) {
+                bi = bi.negate();
+            }
+        } else {
+            bi = BigInteger.valueOf(l * signum);
+        }
+        // Update the scale based on the precision
+        scale += (length - 2) * 2;
+        BigDecimal v = new BigDecimal(bi, scale);
+        return v;
+    }
+
+    private static int getLength(BigDecimal v) {
+        int signum = v.signum();
+        if (signum == 0) { // Special case for zero
+            return 1;
+        }
+        return (signum < 0 ? 2 : 1) + (v.precision() + 1 + (v.scale() % 2 == 0 ? 0 : 1)) / 2;
+    }
+
+    private static final int         MAX_PRECISION            = 38;
+    private static final MathContext DEFAULT_MATH_CONTEXT     = new MathContext(MAX_PRECISION, RoundingMode.HALF_UP);
+    private static final Integer     MAX_BIG_DECIMAL_BYTES    = 21;
+    private static final byte        ZERO_BYTE                = (byte) 0x80;
+    private static final byte        NEG_TERMINAL_BYTE        = (byte) 102;
+    private static final int         EXP_BYTE_OFFSET          = 65;
+    private static final int         POS_DIGIT_OFFSET         = 1;
+    private static final int         NEG_DIGIT_OFFSET         = 101;
+    private static final BigInteger  MAX_LONG                 = BigInteger.valueOf(Long.MAX_VALUE);
+    private static final BigInteger  MIN_LONG                 = BigInteger.valueOf(Long.MIN_VALUE);
+    private static final BigInteger  ONE_HUNDRED              = BigInteger.valueOf(100);
+    private static final long        MAX_LONG_FOR_DESERIALIZE = Long.MAX_VALUE / 1000;
+
+    private static int decimalToBytes(BigDecimal v, byte[] result, final int offset, int length) {
+        int signum = v.signum();
+        if (signum == 0) {
+            result[offset] = ZERO_BYTE;
+            return 1;
+        }
+        int index = offset + length;
+        int scale = v.scale();
+        int expOffset = scale % 2 * (scale < 0 ? -1 : 1);
+        int multiplyBy;
+        BigInteger divideBy;
+        if (expOffset == 0) {
+            multiplyBy = 1;
+            divideBy = ONE_HUNDRED;
+        } else {
+            multiplyBy = 10;
+            divideBy = BigInteger.TEN;
+        }
+        // Normalize the scale based on what is necessary to end up with a base
+        // 100
+        // decimal (i.e. 10.123e3)
+        int digitOffset;
+        BigInteger compareAgainst;
+        if (signum == 1) {
+            digitOffset = POS_DIGIT_OFFSET;
+            compareAgainst = MAX_LONG;
+            scale -= (length - 2) * 2;
+            result[offset] = (byte) ((-(scale + expOffset) / 2 + EXP_BYTE_OFFSET) | 0x80);
+        } else {
+            digitOffset = NEG_DIGIT_OFFSET;
+            compareAgainst = MIN_LONG;
+            // Scale adjustment shouldn't include terminal byte in length
+            scale -= (length - 2 - 1) * 2;
+            result[offset] = (byte) (~(-(scale + expOffset) / 2 + EXP_BYTE_OFFSET + 128) & 0x7F);
+            if (length <= MAX_BIG_DECIMAL_BYTES) {
+                result[--index] = NEG_TERMINAL_BYTE;
+            } else {
+                // Adjust length and offset down because we don't have enough
+                // room
+                length = MAX_BIG_DECIMAL_BYTES;
+                index = offset + length;
+            }
+        }
+        BigInteger bi = v.unscaledValue();
+        // Use BigDecimal arithmetic until we can fit into a long
+        while (bi.compareTo(compareAgainst) * signum > 0) {
+            BigInteger[] dandr = bi.divideAndRemainder(divideBy);
+            bi = dandr[0];
+            int digit = dandr[1].intValue();
+            result[--index] = (byte) (digit * multiplyBy + digitOffset);
+            multiplyBy = 1;
+            divideBy = ONE_HUNDRED;
+        }
+        long l = bi.longValue();
+        do {
+            long divBy = 100 / multiplyBy;
+            long digit = l % divBy;
+            l /= divBy;
+            result[--index] = (byte) (digit * multiplyBy + digitOffset);
+            multiplyBy = 1;
+        } while (l != 0);
+
+        return length;
+    }
+
+    private static void checkForSufficientLength(byte[] b, int offset, int requiredLength) {
+        if (b.length < offset + requiredLength) {
+            throw new RuntimeException("Expected length of at least " + requiredLength + " bytes, but had "
+                                       + (b.length - offset));
+        }
+    }
+
+    private static Date parseDatetime(String dateStr) {
+        Date date = null;
+        int len = dateStr.length();
+        if (len == 10 && dateStr.charAt(4) == '-' && dateStr.charAt(7) == '-') {
+            date = new DateTime(dateStr).toDate();
+        } else if (len == 8 && dateStr.charAt(2) == ':' && dateStr.charAt(5) == ':') {
+            date = new DateTime("T" + dateStr).toDate();
+        } else if (len >= 19 && dateStr.charAt(4) == '-' && dateStr.charAt(7) == '-' && dateStr.charAt(13) == ':'
+                   && dateStr.charAt(16) == ':') {
+            date = new DateTime(dateStr.replace(" ", "T")).toDate();
+        }
+        return date;
+    }
+}

+ 67 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/Type.java

@@ -0,0 +1,67 @@
+package com.alibaba.otter.canal.client.adapter.hbase.support;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Date;
+
+/**
+ * Java类型
+ *
+ * @author machengyuan 2018-8-21 下午06:11:36
+ * @version 1.0.0
+ */
+public enum Type {
+                  DEFAULT, STRING, INTEGER, LONG, SHORT, BOOLEAN, FLOAT, DOUBLE, BIGDECIMAL, DATE, BYTE, BYTES;
+
+    private static Logger logger = LoggerFactory.getLogger(Type.class);
+
+    public static Type getType(String type) {
+        if (type == null) {
+            return DEFAULT;
+        }
+        try {
+            return Type.valueOf(type);
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            return DEFAULT;
+        }
+    }
+
+    public static Type getType(Class<?> javaType) {
+        if (javaType == null) {
+            return DEFAULT;
+        }
+        Type type;
+        if (Integer.class == javaType || int.class == javaType) {
+            type = INTEGER;
+        } else if (Long.class == javaType || long.class == javaType) {
+            type = LONG;
+        } else if (Byte.class == javaType || byte.class == javaType) {
+            type = BYTE;
+        } else if (Short.class == javaType || short.class == javaType) {
+            type = SHORT;
+        } else if (Float.class == javaType || float.class == javaType) {
+            type = FLOAT;
+        } else if (Double.class == javaType || double.class == javaType) {
+            type = DOUBLE;
+        } else if (Boolean.class == javaType || boolean.class == javaType) {
+            type = BOOLEAN;
+        } else if (Date.class == javaType) {
+            type = DATE;
+        } else if (byte[].class == javaType) {
+            type = BYTES;
+        } else if (String.class == javaType) {
+            type = STRING;
+        } else if (BigDecimal.class == javaType) {
+            type = BIGDECIMAL;
+        } else if (BigInteger.class == javaType) {
+            type = LONG;
+        } else {
+            type = DEFAULT;
+        }
+        return type;
+    }
+}

+ 189 - 0
client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/support/TypeUtil.java

@@ -0,0 +1,189 @@
+package com.alibaba.otter.canal.client.adapter.hbase.support;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.Date;
+
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * Java类型转换工具类
+ *
+ * @author machengyuan 2018-8-21 下午06:12:34
+ * @version 1.0.0
+ */
+public class TypeUtil {
+
+    public static byte[] toBytes(Object obj) {
+        if (obj == null) {
+            return null;
+        }
+        byte[] bytes;
+        Class<?> clazz = obj.getClass();
+        if (clazz == String.class) {
+            bytes = Bytes.toBytes((String) obj);
+        } else if (clazz == Integer.class || clazz == int.class) {
+            bytes = Bytes.toBytes((Integer) obj);
+        } else if (clazz == Long.class || clazz == long.class) {
+            bytes = Bytes.toBytes((Long) obj);
+        } else if (clazz == Short.class || clazz == short.class) {
+            bytes = Bytes.toBytes((Short) obj);
+        } else if (clazz == Boolean.class || clazz == boolean.class) {
+            bytes = Bytes.toBytes((Boolean) obj);
+        } else if (clazz == Float.class || clazz == float.class) {
+            bytes = Bytes.toBytes((Float) obj);
+        } else if (clazz == Double.class || clazz == double.class) {
+            bytes = Bytes.toBytes((Double) obj);
+        } else if (clazz == Byte.class || clazz == byte.class) {
+            bytes = new byte[] { (byte) obj };
+        } else if (clazz == BigDecimal.class) {
+            bytes = Bytes.toBytes((BigDecimal) obj);
+        } else if (clazz == BigInteger.class) {
+            bytes = Bytes.toBytes(((BigInteger) obj).longValue());
+        } else if (clazz == Date.class) {
+            bytes = Bytes.toBytes(((Date) obj).getTime());
+        } else if (clazz == byte[].class) {
+            bytes = (byte[]) obj;
+        } else {
+            // 其余类型统一转换为string
+            bytes = Bytes.toBytes(obj.toString());
+        }
+        return bytes;
+    }
+
+    public static byte[] toBytes(Object v, Type type) {
+        if (v == null) {
+            return null;
+        }
+        byte[] b = null;
+        if (type == Type.DEFAULT) {
+            Type type1 = Type.getType(v.getClass());
+            if (type1 != null && type1 != Type.DEFAULT) {
+                b = toBytes(v, type1);
+            }
+        } else if (type == Type.STRING) {
+            b = Bytes.toBytes(v.toString());
+        } else if (type == Type.INTEGER) {
+            b = Bytes.toBytes(((Number) v).intValue());
+        } else if (type == Type.LONG) {
+            b = Bytes.toBytes(((Number) v).longValue());
+        } else if (type == Type.SHORT) {
+            b = Bytes.toBytes(((Number) v).shortValue());
+        } else if (type == Type.BYTE) {
+            b = Bytes.toBytes(((Number) v).byteValue());
+        } else if (type == Type.FLOAT) {
+            b = Bytes.toBytes(((Number) v).floatValue());
+        } else if (type == Type.DOUBLE) {
+            b = Bytes.toBytes(((Number) v).doubleValue());
+        } else if (type == Type.BOOLEAN) {
+            b = Bytes.toBytes(((Boolean) v));
+        } else if (type == Type.DATE) {
+            b = Bytes.toBytes(((Date) v).getTime());
+        } else if (type == Type.BYTES) {
+            b = (byte[]) v;
+        } else if (type == Type.BIGDECIMAL) {
+            if (v instanceof BigDecimal) {
+                b = Bytes.toBytes((BigDecimal) v);
+            } else {
+                b = Bytes.toBytes(new BigDecimal(v.toString()));
+            }
+        }
+        return b;
+    }
+
+    @SuppressWarnings("unchecked")
+    public static <T> T toObject(byte[] bytes, Class<T> clazz) {
+        if (bytes == null) {
+            return null;
+        }
+        Object res;
+        if (String.class == clazz) {
+            res = Bytes.toString(bytes);
+        } else if (Integer.class == clazz || int.class == clazz) {
+            res = Bytes.toInt(bytes);
+        } else if (Long.class == clazz || long.class == clazz) {
+            res = Bytes.toLong(bytes);
+        } else if (Short.class == clazz || short.class == clazz) {
+            res = Bytes.toShort(bytes);
+        } else if (Boolean.class == clazz || boolean.class == clazz) {
+            res = Bytes.toBoolean(bytes);
+        } else if (Float.class == clazz || float.class == clazz) {
+            res = Bytes.toFloat(bytes);
+        } else if (Double.class == clazz || double.class == clazz) {
+            res = Bytes.toDouble(bytes);
+        } else if (Byte.class == clazz || byte.class == clazz) {
+            res = bytes[0];
+        } else if (BigDecimal.class == clazz) {
+            res = Bytes.toBigDecimal(bytes);
+        } else if (BigInteger.class == clazz) {
+            res = Bytes.toLong(bytes);
+        } else if (java.sql.Date.class == clazz) {
+            long ts = Bytes.toLong(bytes);
+            res = new java.sql.Date(ts);
+        } else if (Time.class == clazz) {
+            long ts = Bytes.toLong(bytes);
+            res = new Time(ts);
+        } else if (Timestamp.class == clazz) {
+            long ts = Bytes.toLong(bytes);
+            res = new Timestamp(ts);
+        } else if (Date.class == clazz) {
+            long ts = Bytes.toLong(bytes);
+            res = new Date(ts);
+        } else {
+            throw new IllegalArgumentException("mismatch class type");
+        }
+        return (T) res;
+    }
+
+    @SuppressWarnings("unchecked")
+    public static <T> T toObject(byte[] bytes, Type type) {
+        if (bytes == null) {
+            return null;
+        }
+        Object res = null;
+        if (type == Type.STRING || type == Type.DEFAULT) {
+            res = Bytes.toString(bytes);
+        } else if (type == Type.INTEGER) {
+            if (bytes.length == Bytes.SIZEOF_INT) {
+                res = Bytes.toInt(bytes);
+            }
+        } else if (type == Type.LONG) {
+            if (bytes.length == Bytes.SIZEOF_LONG) {
+                res = Bytes.toLong(bytes);
+            }
+        } else if (type == Type.SHORT) {
+            if (bytes.length == Bytes.SIZEOF_SHORT) {
+                res = Bytes.toShort(bytes);
+            }
+        } else if (type == Type.BYTE) {
+            if (bytes.length == Bytes.SIZEOF_BYTE) {
+                res = bytes[0];
+            }
+        } else if (type == Type.FLOAT) {
+            if (bytes.length == Bytes.SIZEOF_FLOAT) {
+                res = Bytes.toFloat(bytes);
+            }
+        } else if (type == Type.DOUBLE) {
+            if (bytes.length == Bytes.SIZEOF_DOUBLE) {
+                res = Bytes.toDouble(bytes);
+            }
+        } else if (type == Type.BOOLEAN) {
+            if (bytes.length == Bytes.SIZEOF_BOOLEAN) {
+                res = Bytes.toBoolean(bytes);
+            }
+        } else if (type == Type.DATE) {
+            if (bytes.length == Bytes.SIZEOF_LONG) {
+                res = new Date(Bytes.toLong(bytes));
+            }
+        } else if (type == Type.BYTES) {
+            res = bytes;
+        } else if (type == Type.BIGDECIMAL) {
+            res = Bytes.toBigDecimal(bytes);
+        } else {
+            throw new IllegalArgumentException("mismatch class type");
+        }
+        return (T) res;
+    }
+}

+ 1 - 0
client-adapter/hbase/src/main/resources/META-INF/canal/com.alibaba.otter.canal.client.adapter.OuterAdapter

@@ -0,0 +1 @@
+hbase=com.alibaba.otter.canal.client.adapter.hbase.HbaseAdapter

+ 60 - 0
client-adapter/hbase/src/main/resources/hbase/mytest_person2.yml

@@ -0,0 +1,60 @@
+dataSourceKey: defaultDS
+destination: example
+hbaseMapping:
+  mode: STRING  #NATIVE   #PHOENIX
+  database: mytest  # 数据库名
+  table: person2     # 数据库表名
+  hbaseTable: MYTEST.PERSON2   # HBase表名
+  family: CF  # 默认统一Family名称
+  uppercaseQualifier: true  # 字段名转大写, 默认为true
+  commitBatch: 3000 # 批量提交的大小
+  #rowKey: id,type  # 复合字段rowKey不能和columns中的rowKey重复
+  columns:
+    # 数据库字段:HBase对应字段
+    id: ROWKEY LEN:15
+    name: NAME
+    email: EMAIL
+    type:
+    c_time: C_TIME
+    birthday: BIRTHDAY
+#  excludeColumns:
+#    - lat   # 忽略字段
+
+# -- NATIVE类型
+# $DEFAULT
+# $STRING
+# $INTEGER
+# $LONG
+# $SHORT
+# $BOOLEAN
+# $FLOAT
+# $DOUBLE
+# $BIGDECIMAL
+# $DATE
+# $BYTE
+# $BYTES
+
+# -- PHOENIX类型
+# $DEFAULT                  对应PHOENIX里的VARCHAR
+# $UNSIGNED_INT             对应PHOENIX里的UNSIGNED_INT           4字节
+# $UNSIGNED_LONG            对应PHOENIX里的UNSIGNED_LONG          8字节
+# $UNSIGNED_TINYINT         对应PHOENIX里的UNSIGNED_TINYINT       1字节
+# $UNSIGNED_SMALLINT        对应PHOENIX里的UNSIGNED_SMALLINT      2字节
+# $UNSIGNED_FLOAT           对应PHOENIX里的UNSIGNED_FLOAT         4字节
+# $UNSIGNED_DOUBLE          对应PHOENIX里的UNSIGNED_DOUBLE        8字节
+# $INTEGER                  对应PHOENIX里的INTEGER                4字节
+# $BIGINT                   对应PHOENIX里的BIGINT                 8字节
+# $TINYINT                  对应PHOENIX里的TINYINT                1字节
+# $SMALLINT                 对应PHOENIX里的SMALLINT               2字节
+# $FLOAT                    对应PHOENIX里的FLOAT                  4字节
+# $DOUBLE                    对应PHOENIX里的DOUBLE                 8字节
+# $BOOLEAN                  对应PHOENIX里的BOOLEAN                1字节
+# $TIME                     对应PHOENIX里的TIME                   8字节
+# $DATE                     对应PHOENIX里的DATE                   8字节
+# $TIMESTAMP                对应PHOENIX里的TIMESTAMP              12字节
+# $UNSIGNED_TIME            对应PHOENIX里的UNSIGNED_TIME          8字节
+# $UNSIGNED_DATE            对应PHOENIX里的UNSIGNED_DATE          8字节
+# $UNSIGNED_TIMESTAMP       对应PHOENIX里的UNSIGNED_TIMESTAMP     12字节
+# $VARCHAR                  对应PHOENIX里的VARCHAR                动态长度
+# $VARBINARY                对应PHOENIX里的VARBINARY              动态长度
+# $DECIMAL                  对应PHOENIX里的DECIMAL                动态长度

+ 261 - 0
client-adapter/launcher/pom.xml

@@ -0,0 +1,261 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>canal.client-adapter</artifactId>
+        <groupId>com.alibaba.otter</groupId>
+        <version>1.1.3-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>com.alibaba.otter</groupId>
+    <artifactId>client-adapter.launcher</artifactId>
+    <packaging>jar</packaging>
+    <name>canal client adapter launcher module for otter ${project.version}</name>
+
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-dependencies</artifactId>
+                <version>2.0.1.RELEASE</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.alibaba.otter</groupId>
+            <artifactId>client-adapter.common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba.otter</groupId>
+            <artifactId>canal.client</artifactId>
+            <version>1.1.3-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.yaml</groupId>
+            <artifactId>snakeyaml</artifactId>
+            <version>1.19</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-configuration-processor</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.cloud</groupId>
+            <artifactId>spring-cloud-context</artifactId>
+            <version>2.0.0.RELEASE</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.curator</groupId>
+            <artifactId>curator-recipes</artifactId>
+            <version>2.10.0</version>
+        </dependency>
+        <!-- 单独引入rocketmq依赖 -->
+        <dependency>
+            <groupId>org.apache.rocketmq</groupId>
+            <artifactId>rocketmq-client</artifactId>
+            <version>4.3.0</version>
+        </dependency>
+        <!-- 单独引入kafka依赖 -->
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+            <version>1.1.1</version>
+        </dependency>
+
+        <!-- jdbc -->
+        <dependency>
+            <groupId>org.postgresql</groupId>
+            <artifactId>postgresql</artifactId>
+            <version>42.1.4</version>
+        </dependency>
+        <dependency>
+            <groupId>com.oracle</groupId>
+            <artifactId>ojdbc6</artifactId>
+            <version>11.2.0.3</version>
+        </dependency>
+        <dependency>
+            <groupId>com.microsoft.sqlserver</groupId>
+            <artifactId>mssql-jdbc</artifactId>
+            <version>7.0.0.jre8</version>
+        </dependency>
+
+        <!-- outer adapter jar with dependencies-->
+        <dependency>
+            <groupId>com.alibaba.otter</groupId>
+            <artifactId>client-adapter.logger</artifactId>
+            <version>${project.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>*</artifactId>
+                    <groupId>*</groupId>
+                </exclusion>
+            </exclusions>
+            <classifier>jar-with-dependencies</classifier>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba.otter</groupId>
+            <artifactId>client-adapter.hbase</artifactId>
+            <version>${project.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>*</artifactId>
+                    <groupId>*</groupId>
+                </exclusion>
+            </exclusions>
+            <classifier>jar-with-dependencies</classifier>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba.otter</groupId>
+            <artifactId>client-adapter.elasticsearch</artifactId>
+            <version>${project.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>*</artifactId>
+                    <groupId>*</groupId>
+                </exclusion>
+            </exclusions>
+            <classifier>jar-with-dependencies</classifier>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba.otter</groupId>
+            <artifactId>client-adapter.rdb</artifactId>
+            <version>${project.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>*</artifactId>
+                    <groupId>*</groupId>
+                </exclusion>
+            </exclusions>
+            <classifier>jar-with-dependencies</classifier>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <version>2.10</version>
+                <executions>
+                    <execution>
+                        <id>copy-dependencies-to-canal-client-service</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>copy-dependencies</goal>
+                        </goals>
+                        <configuration>
+                            <includeClassifiers>jar-with-dependencies</includeClassifiers>
+                            <outputDirectory>${project.basedir}/target/canal-adapter/plugin</outputDirectory>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.2.1</version>
+                <executions>
+                    <execution>
+                        <id>assemble</id>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                        <phase>package</phase>
+                    </execution>
+                </executions>
+                <configuration>
+                    <appendAssemblyId>false</appendAssemblyId>
+                    <attach>false</attach>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <version>3.0.2</version>
+                <configuration>
+                    <archive>
+                        <manifest>
+                            <addClasspath>true</addClasspath>
+                            <classpathPrefix>lib/</classpathPrefix>
+                            <mainClass>com.alibaba.otter.canal.adapter.launcher.CanalAdapterApplication</mainClass>
+                        </manifest>
+                    </archive>
+                    <excludes>
+                        <exclude>**/*.properties</exclude>
+                        <exclude>**/*.xml</exclude>
+                        <exclude>**/*.yml</exclude>
+                        <exclude>static/**</exclude>
+                        <exclude>templates/**</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+
+    <profiles>
+        <profile>
+            <id>dev</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+                <property>
+                    <name>env</name>
+                    <value>!release</value>
+                </property>
+            </activation>
+
+            <build>
+                <plugins>
+                    <plugin>
+                        <artifactId>maven-assembly-plugin</artifactId>
+                        <configuration>
+                            <descriptors>
+                                <descriptor>${basedir}/src/main/assembly/dev.xml</descriptor>
+                            </descriptors>
+                            <finalName>canal-adapter</finalName>
+                            <outputDirectory>${project.build.directory}</outputDirectory>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+
+        </profile>
+
+        <profile>
+            <id>release</id>
+            <activation>
+                <property>
+                    <name>env</name>
+                    <value>release</value>
+                </property>
+            </activation>
+
+            <build>
+                <plugins>
+                    <plugin>
+                        <artifactId>maven-assembly-plugin</artifactId>
+                        <configuration>
+                            <descriptors>
+                                <descriptor>${basedir}/src/main/assembly/release.xml</descriptor>
+                            </descriptors>
+                            <finalName>canal.adapter-${project.version}</finalName>
+                            <outputDirectory>${project.basedir}/../../target</outputDirectory>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+</project>

+ 69 - 0
client-adapter/launcher/src/main/assembly/dev.xml

@@ -0,0 +1,69 @@
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+	<id>dist</id>
+	<formats>
+		<format>dir</format>
+	</formats>
+	<includeBaseDirectory>false</includeBaseDirectory>
+	<fileSets>
+        <fileSet>
+            <directory>.</directory>
+            <outputDirectory>/</outputDirectory>
+            <includes>
+                <include>README*</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>./src/main/bin</directory>
+            <outputDirectory>bin</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+            <fileMode>0755</fileMode>
+        </fileSet>
+        <fileSet>
+            <directory>./src/main/resources</directory>
+            <outputDirectory>/conf</outputDirectory>
+            <includes>
+                <include>**/*</include>
+
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>../elasticsearch/src/main/resources/es</directory>
+            <outputDirectory>/conf/es</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>../hbase/src/main/resources/hbase</directory>
+            <outputDirectory>/conf/hbase</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>../rdb/src/main/resources/</directory>
+            <outputDirectory>/conf</outputDirectory>
+            <excludes>
+                <exclude>META-INF/**</exclude>
+            </excludes>
+        </fileSet>
+        <fileSet>
+            <directory>target</directory>
+            <outputDirectory>logs</outputDirectory>
+            <excludes>
+                <exclude>**/*</exclude>
+            </excludes>
+        </fileSet>
+    </fileSets>
+    <dependencySets>
+        <dependencySet>
+            <outputDirectory>lib</outputDirectory>
+            <excludes>
+                <exclude>junit:junit</exclude>
+            </excludes>
+        </dependencySet>
+    </dependencySets>
+</assembly>

+ 74 - 0
client-adapter/launcher/src/main/assembly/release.xml

@@ -0,0 +1,74 @@
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+    <id>dist</id>
+    <formats>
+        <format>tar.gz</format>
+    </formats>
+    <includeBaseDirectory>false</includeBaseDirectory>
+    <fileSets>
+        <fileSet>
+            <directory>.</directory>
+            <outputDirectory>/</outputDirectory>
+            <includes>
+                <include>README*</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>./src/main/bin</directory>
+            <outputDirectory>bin</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+            <fileMode>0755</fileMode>
+        </fileSet>
+        <fileSet>
+            <directory>./src/main/resources</directory>
+            <outputDirectory>/conf</outputDirectory>
+            <includes>
+                <include>**/*</include>
+
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>../elasticsearch/src/main/resources/es</directory>
+            <outputDirectory>/conf/es</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>../hbase/src/main/resources/hbase</directory>
+            <outputDirectory>/conf/hbase</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>../rdb/src/main/resources/</directory>
+            <outputDirectory>/conf</outputDirectory>
+            <excludes>
+                <exclude>META-INF/**</exclude>
+            </excludes>
+        </fileSet>
+        <fileSet>
+            <directory>target</directory>
+            <outputDirectory>logs</outputDirectory>
+            <excludes>
+                <exclude>**/*</exclude>
+            </excludes>
+        </fileSet>
+        <fileSet>
+            <directory>${project.basedir}/target/canal-adapter/plugin</directory>
+            <outputDirectory>/plugin/</outputDirectory>
+        </fileSet>
+    </fileSets>
+    <dependencySets>
+        <dependencySet>
+            <outputDirectory>lib</outputDirectory>
+            <excludes>
+                <exclude>junit:junit</exclude>
+            </excludes>
+        </dependencySet>
+    </dependencySets>
+</assembly>

+ 5 - 0
client-adapter/launcher/src/main/bin/restart.sh

@@ -0,0 +1,5 @@
#!/bin/bash

# Restart the canal-adapter: stop the running instance, then start a new one.

# Run from this script's own directory so stop.sh / startup.sh are found even
# when restart.sh is invoked from an arbitrary working directory.
cd "$(dirname "$0")"

sh stop.sh

sh startup.sh

+ 22 - 0
client-adapter/launcher/src/main/bin/startup.bat

@@ -0,0 +1,22 @@
@echo off
@if not "%ECHO%" == ""  echo %ECHO%
@if "%OS%" == "Windows_NT"  setlocal

rem Resolve the directory this script lives in (%~dp0) so relative paths work
rem regardless of the caller's current directory.
set ENV_PATH=.\
if "%OS%" == "Windows_NT" set ENV_PATH=%~dp0%

rem conf\ sits next to bin\; it is put first on the classpath so external
rem configuration wins over anything packaged in the jars.
set conf_dir=%ENV_PATH%\..\conf

set CLASSPATH=%conf_dir%
set CLASSPATH=%conf_dir%\..\lib\*;%CLASSPATH%

rem NOTE(review): -XX:PermSize is only honored on pre-8 JVMs - confirm target JDK.
set JAVA_MEM_OPTS= -Xms128m -Xmx512m -XX:PermSize=128m
set JAVA_OPTS_EXT= -Djava.awt.headless=true -Djava.net.preferIPv4Stack=true -Dapplication.codeset=UTF-8 -Dfile.encoding=UTF-8
rem appName tags the process so the stop script can locate it.
set ADAPTER_OPTS= -DappName=canal-adapter

set JAVA_OPTS= %JAVA_MEM_OPTS% %JAVA_OPTS_EXT% %ADAPTER_OPTS%

rem Echo the full command line for troubleshooting, then launch in foreground.
set CMD_STR= java %JAVA_OPTS% -classpath "%CLASSPATH%" com.alibaba.otter.canal.adapter.launcher.CanalAdapterApplication
echo start cmd : %CMD_STR%

java %JAVA_OPTS% -classpath "%CLASSPATH%" com.alibaba.otter.canal.adapter.launcher.CanalAdapterApplication

+ 67 - 0
client-adapter/launcher/src/main/bin/startup.sh

@@ -0,0 +1,67 @@
#!/bin/bash

# Startup script for the canal client adapter launcher.

current_path=`pwd`
# Resolve the absolute directory of this script; readlink -f is GNU-only,
# hence the subshell fallback for non-Linux platforms.
case "`uname`" in
    Linux)
		bin_abs_path=$(readlink -f $(dirname $0))
		;;
	*)
		bin_abs_path=`cd $(dirname $0); pwd`
		;;
esac
base=${bin_abs_path}/..
export LANG=en_US.UTF-8
export BASE=$base

# Refuse to start while a pid file exists: a previous instance may still run.
# BUGFIX: "2>&2" was a no-op (duplicates stderr onto itself); the error
# message is meant to go to stderr, i.e. "1>&2".
if [ -f $base/bin/adapter.pid ] ; then
	echo "found adapter.pid , Please run stop.sh first ,then startup.sh" 1>&2
    exit 1
fi

if [ ! -d $base/logs ] ; then
	mkdir -p $base/logs
fi

## set java path
if [ -z "$JAVA" ] ; then
  JAVA=$(which java)
fi

# Fall back to well-known internal JDK locations when java is not on PATH.
ALIBABA_JAVA="/usr/alibaba/java/bin/java"
TAOBAO_JAVA="/opt/taobao/java/bin/java"
if [ -z "$JAVA" ]; then
  if [ -f $ALIBABA_JAVA ] ; then
  	JAVA=$ALIBABA_JAVA
  elif [ -f $TAOBAO_JAVA ] ; then
  	JAVA=$TAOBAO_JAVA
  else
  	echo "Cannot find a Java JDK. Please set either set JAVA or put java (>=1.5) in your PATH." 1>&2
    exit 1
  fi
fi

# Pick heap/GC options depending on whether the JVM is 64-bit.
str=`file -L $JAVA | grep 64-bit`
if [ -n "$str" ]; then
	JAVA_OPTS="-server -Xms2048m -Xmx3072m -Xmn1024m -XX:SurvivorRatio=2 -XX:PermSize=96m -XX:MaxPermSize=256m -Xss256k -XX:-UseAdaptiveSizePolicy -XX:MaxTenuringThreshold=15 -XX:+DisableExplicitGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+UseCMSCompactAtFullCollection -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:+HeapDumpOnOutOfMemoryError"
else
	JAVA_OPTS="-server -Xms1024m -Xmx1024m -XX:NewSize=256m -XX:MaxNewSize=256m -XX:MaxPermSize=128m "
fi

JAVA_OPTS=" $JAVA_OPTS -Djava.awt.headless=true -Djava.net.preferIPv4Stack=true -Dfile.encoding=UTF-8"
# appName tags the process so stop.sh can find it by command line.
ADAPTER_OPTS="-DappName=canal-adapter"

# Build the classpath from every entry in lib/, with conf/ in front so
# external configuration wins over packaged defaults.
for i in $base/lib/*;
    do CLASSPATH=$i:"$CLASSPATH";
done

CLASSPATH="$base/conf:$CLASSPATH";

echo "cd to $bin_abs_path for workaround relative path"
cd $bin_abs_path

# Launch in the background, append all output to the adapter log, and record
# the pid so stop.sh can terminate it later.
echo CLASSPATH :$CLASSPATH
$JAVA $JAVA_OPTS $ADAPTER_OPTS -classpath .:$CLASSPATH com.alibaba.otter.canal.adapter.launcher.CanalAdapterApplication 1>>$base/logs/adapter.log 2>&1 &
echo $! > $base/bin/adapter.pid

echo "cd to $current_path for continue"
cd $current_path

+ 4 - 4
kafka/src/main/bin/stop.sh → client-adapter/launcher/src/main/bin/stop.sh

@@ -37,15 +37,15 @@ get_pid() {
 }
 
 base=`dirname $0`/..
-pidfile=$base/bin/canal.pid
+pidfile=$base/bin/adapter.pid
 if [ ! -f "$pidfile" ];then
-	echo "canal is not running. exists"
+	echo "canal-adapter is not running. exists"
 	exit
 fi
 
 pid=`cat $pidfile`
 if [ "$pid" == "" ] ; then
-	pid=`get_pid "appName=otter-canal"`
+	pid=`get_pid "appName=canal-adapter"`
 fi
 
 echo -e "`hostname`: stopping canal $pid ... "
@@ -54,7 +54,7 @@ kill $pid
 LOOPS=0
 while (true); 
 do 
-	gpid=`get_pid "appName=otter-canal" "$pid"`
+	gpid=`get_pid "appName=canal-adapter" "$pid"`
     if [ "$gpid" == "" ] ; then
     	echo "Oook! cost:$LOOPS"
     	`rm $pidfile`

+ 21 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/CanalAdapterApplication.java

@@ -0,0 +1,21 @@
+package com.alibaba.otter.canal.adapter.launcher;
+
+import org.springframework.boot.Banner;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+/**
+ * 启动入口
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+@SpringBootApplication
+public class CanalAdapterApplication {
+
+    public static void main(String[] args) {
+        SpringApplication application = new SpringApplication(CanalAdapterApplication.class);
+        application.setBannerMode(Banner.Mode.OFF);
+        application.run(args);
+    }
+}

+ 120 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/common/EtlLock.java

@@ -0,0 +1,120 @@
+package com.alibaba.otter.canal.adapter.launcher.common;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantLock;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.Resource;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.recipes.locks.InterProcessMutex;
+import org.springframework.stereotype.Component;
+
+import com.alibaba.otter.canal.adapter.launcher.config.CuratorClient;
+
+/**
+ * Etl 同步锁
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+@Component
+public class EtlLock {
+
+    private static final Map<String, ReentrantLock>     LOCAL_LOCK       = new ConcurrentHashMap<>();
+
+    private static final Map<String, InterProcessMutex> DISTRIBUTED_LOCK = new ConcurrentHashMap<>();
+
+    private static Mode                                 mode             = Mode.LOCAL;
+
+    @Resource
+    private CuratorClient                               curatorClient;
+
+    @PostConstruct
+    public void init() {
+        CuratorFramework curator = curatorClient.getCurator();
+        if (curator != null) {
+            mode = Mode.DISTRIBUTED;
+        } else {
+            mode = Mode.LOCAL;
+        }
+    }
+
+    private ReentrantLock getLock(String key) {
+        ReentrantLock lock = LOCAL_LOCK.get(key);
+        if (lock == null) {
+            synchronized (EtlLock.class) {
+                lock = LOCAL_LOCK.get(key);
+                if (lock == null) {
+                    lock = new ReentrantLock();
+                    LOCAL_LOCK.put(key, lock);
+                }
+            }
+        }
+        return lock;
+    }
+
+    private InterProcessMutex getRemoteLock(String key) {
+        InterProcessMutex lock = DISTRIBUTED_LOCK.get(key);
+        if (lock == null) {
+            synchronized (EtlLock.class) {
+                lock = DISTRIBUTED_LOCK.get(key);
+                if (lock == null) {
+                    lock = new InterProcessMutex(curatorClient.getCurator(), key);
+                    DISTRIBUTED_LOCK.put(key, lock);
+                }
+            }
+        }
+        return lock;
+    }
+
+    public void lock(String key) throws Exception {
+        if (mode == Mode.LOCAL) {
+            getLock(key).lock();
+        } else {
+            InterProcessMutex lock = getRemoteLock(key);
+            lock.acquire();
+        }
+    }
+
+    public boolean tryLock(String key, long timeout, TimeUnit unit) {
+        try {
+            if (mode == Mode.LOCAL) {
+                return getLock(key).tryLock(timeout, unit);
+            } else {
+                InterProcessMutex lock = getRemoteLock(key);
+                return lock.acquire(timeout, unit);
+            }
+        } catch (Exception e) {
+            return false;
+        }
+    }
+
+    public boolean tryLock(String key) {
+        try {
+            if (mode == Mode.LOCAL) {
+                return getLock(key).tryLock();
+            } else {
+                InterProcessMutex lock = getRemoteLock(key);
+                return lock.acquire(500, TimeUnit.MILLISECONDS);
+            }
+        } catch (Exception e) {
+            return false;
+        }
+    }
+
+    public void unlock(String key) {
+        if (mode == Mode.LOCAL) {
+            getLock(key).unlock();
+        } else {
+            InterProcessMutex lock = getRemoteLock(key);
+            try {
+                lock.release();
+            } catch (Exception e) {
+                // ignore
+            }
+        }
+    }
+}

+ 6 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/common/Mode.java

@@ -0,0 +1,6 @@
+package com.alibaba.otter.canal.adapter.launcher.common;
+
+public enum Mode {
+                  LOCAL, // standalone mode: switches/locks are JVM-local
+                  DISTRIBUTED // distributed mode: state coordinated through ZooKeeper
+}

+ 214 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/common/SyncSwitch.java

@@ -0,0 +1,214 @@
+package com.alibaba.otter.canal.adapter.launcher.common;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.Resource;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.recipes.cache.NodeCache;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.data.Stat;
+import org.springframework.stereotype.Component;
+
+import com.alibaba.otter.canal.adapter.launcher.config.AdapterCanalConfig;
+import com.alibaba.otter.canal.adapter.launcher.config.CuratorClient;
+import com.alibaba.otter.canal.common.utils.BooleanMutex;
+
+/**
+ * 同步开关
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+@Component
+public class SyncSwitch {
+
+    private static final String                    SYN_SWITCH_ZK_NODE = "/sync-switch/";
+
+    private static final Map<String, BooleanMutex> LOCAL_LOCK         = new ConcurrentHashMap<>();
+
+    private static final Map<String, BooleanMutex> DISTRIBUTED_LOCK   = new ConcurrentHashMap<>();
+
+    private static Mode                            mode               = Mode.LOCAL;
+
+    @Resource
+    private AdapterCanalConfig                     adapterCanalConfig;
+    @Resource
+    private CuratorClient                          curatorClient;
+
+    @PostConstruct
+    public void init() {
+        CuratorFramework curator = curatorClient.getCurator();
+        if (curator != null) {
+            mode = Mode.DISTRIBUTED;
+            DISTRIBUTED_LOCK.clear();
+            for (String destination : adapterCanalConfig.DESTINATIONS) {
+                // 对应每个destination注册锁
+                BooleanMutex mutex = new BooleanMutex(true);
+                initMutex(curator, destination, mutex);
+                DISTRIBUTED_LOCK.put(destination, mutex);
+                startListen(destination, mutex);
+            }
+        } else {
+            mode = Mode.LOCAL;
+            LOCAL_LOCK.clear();
+            for (String destination : adapterCanalConfig.DESTINATIONS) {
+                // 对应每个destination注册锁
+                LOCAL_LOCK.put(destination, new BooleanMutex(true));
+            }
+        }
+    }
+
+    @SuppressWarnings("resource")
+    private synchronized void startListen(String destination, BooleanMutex mutex) {
+        try {
+            String path = SYN_SWITCH_ZK_NODE + destination;
+            CuratorFramework curator = curatorClient.getCurator();
+            NodeCache nodeCache = new NodeCache(curator, path);
+            nodeCache.start();
+            nodeCache.getListenable().addListener(() -> initMutex(curator, destination, mutex));
+        } catch (Exception e) {
+            throw new RuntimeException(e.getMessage());
+        }
+    }
+
+    private synchronized void initMutex(CuratorFramework curator, String destination, BooleanMutex mutex) {
+        try {
+            String path = SYN_SWITCH_ZK_NODE + destination;
+            Stat stat = curator.checkExists().forPath(path);
+            if (stat == null) {
+                if (!mutex.state()) {
+                    mutex.set(true);
+                }
+            } else {
+                String data = new String(curator.getData().forPath(path), StandardCharsets.UTF_8);
+                if ("on".equals(data)) {
+                    if (!mutex.state()) {
+                        mutex.set(true);
+                    }
+                } else {
+                    if (mutex.state()) {
+                        mutex.set(false);
+                    }
+                }
+            }
+        } catch (Exception e) {
+            throw new RuntimeException(e.getMessage());
+        }
+    }
+
+    public synchronized void off(String destination) {
+        if (mode == Mode.LOCAL) {
+            BooleanMutex mutex = LOCAL_LOCK.get(destination);
+            if (mutex != null && mutex.state()) {
+                mutex.set(false);
+            }
+        } else {
+            try {
+                String path = SYN_SWITCH_ZK_NODE + destination;
+                try {
+                    curatorClient.getCurator()
+                        .create()
+                        .creatingParentContainersIfNeeded()
+                        .withMode(CreateMode.PERSISTENT)
+                        .forPath(path, "off".getBytes(StandardCharsets.UTF_8));
+                } catch (Exception e) {
+                    curatorClient.getCurator().setData().forPath(path, "off".getBytes(StandardCharsets.UTF_8));
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+
+    public synchronized void on(String destination) {
+        if (mode == Mode.LOCAL) {
+            BooleanMutex mutex = LOCAL_LOCK.get(destination);
+            if (mutex != null && !mutex.state()) {
+                mutex.set(true);
+            }
+        } else {
+            try {
+                String path = SYN_SWITCH_ZK_NODE + destination;
+                try {
+                    curatorClient.getCurator()
+                        .create()
+                        .creatingParentContainersIfNeeded()
+                        .withMode(CreateMode.PERSISTENT)
+                        .forPath(path, "on".getBytes(StandardCharsets.UTF_8));
+                } catch (Exception e) {
+                    curatorClient.getCurator().setData().forPath(path, "on".getBytes(StandardCharsets.UTF_8));
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+
+    public synchronized void release(String destination) {
+        if (mode == Mode.LOCAL) {
+            BooleanMutex mutex = LOCAL_LOCK.get(destination);
+            if (mutex != null && !mutex.state()) {
+                mutex.set(true);
+            }
+        }
+        if (mode == Mode.DISTRIBUTED) {
+            BooleanMutex mutex = DISTRIBUTED_LOCK.get(destination);
+            if (mutex != null && !mutex.state()) {
+                mutex.set(true);
+            }
+        }
+    }
+
+    public Boolean status(String destination) {
+        if (mode == Mode.LOCAL) {
+            BooleanMutex mutex = LOCAL_LOCK.get(destination);
+            if (mutex != null) {
+                return mutex.state();
+            } else {
+                return null;
+            }
+        } else {
+            BooleanMutex mutex = DISTRIBUTED_LOCK.get(destination);
+            if (mutex != null) {
+                return mutex.state();
+            } else {
+                return null;
+            }
+        }
+    }
+
+    public void get(String destination) throws InterruptedException {
+        if (mode == Mode.LOCAL) {
+            BooleanMutex mutex = LOCAL_LOCK.get(destination);
+            if (mutex != null) {
+                mutex.get();
+            }
+        } else {
+            BooleanMutex mutex = DISTRIBUTED_LOCK.get(destination);
+            if (mutex != null) {
+                mutex.get();
+            }
+        }
+    }
+
+    public void get(String destination, long timeout, TimeUnit unit) throws InterruptedException, TimeoutException {
+        if (mode == Mode.LOCAL) {
+            BooleanMutex mutex = LOCAL_LOCK.get(destination);
+            if (mutex != null) {
+                mutex.get(timeout, unit);
+            }
+        } else {
+            BooleanMutex mutex = DISTRIBUTED_LOCK.get(destination);
+            if (mutex != null) {
+                mutex.get(timeout, unit);
+            }
+        }
+    }
+
+}

+ 79 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/config/AdapterCanalConfig.java

@@ -0,0 +1,79 @@
+package com.alibaba.otter.canal.adapter.launcher.config;
+
+import java.sql.SQLException;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+
+/**
+ * canal 的相关配置类
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+@Component
+@ConfigurationProperties(prefix = "canal.conf")
+public class AdapterCanalConfig extends CanalClientConfig {
+
+    public final Set<String>              DESTINATIONS = new LinkedHashSet<>();
+
+    private Map<String, DatasourceConfig> srcDataSources;
+
+    @Override
+    public void setCanalAdapters(List<CanalAdapter> canalAdapters) {
+        super.setCanalAdapters(canalAdapters);
+
+        if (canalAdapters != null) {
+            synchronized (DESTINATIONS) {
+                DESTINATIONS.clear();
+                for (CanalAdapter canalAdapter : canalAdapters) {
+                    if (canalAdapter.getInstance() != null) {
+                        DESTINATIONS.add(canalAdapter.getInstance());
+                    }
+                }
+            }
+        }
+    }
+
+    public Map<String, DatasourceConfig> getSrcDataSources() {
+        return srcDataSources;
+    }
+
+    @SuppressWarnings("resource")
+    public void setSrcDataSources(Map<String, DatasourceConfig> srcDataSources) {
+        this.srcDataSources = srcDataSources;
+
+        if (srcDataSources != null) {
+            for (Map.Entry<String, DatasourceConfig> entry : srcDataSources.entrySet()) {
+                DatasourceConfig datasourceConfig = entry.getValue();
+                // 加载数据源连接池
+                DruidDataSource ds = new DruidDataSource();
+                ds.setDriverClassName(datasourceConfig.getDriver());
+                ds.setUrl(datasourceConfig.getUrl());
+                ds.setUsername(datasourceConfig.getUsername());
+                ds.setPassword(datasourceConfig.getPassword());
+                ds.setInitialSize(1);
+                ds.setMinIdle(1);
+                ds.setMaxActive(datasourceConfig.getMaxActive());
+                ds.setMaxWait(60000);
+                ds.setTimeBetweenEvictionRunsMillis(60000);
+                ds.setMinEvictableIdleTimeMillis(300000);
+                ds.setValidationQuery("select 1");
+                try {
+                    ds.init();
+                } catch (SQLException e) {
+                    throw new RuntimeException(e.getMessage(), e);
+                }
+                DatasourceConfig.DATA_SOURCES.put(entry.getKey(), ds);
+            }
+        }
+    }
+}

+ 42 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/config/CuratorClient.java

@@ -0,0 +1,42 @@
+package com.alibaba.otter.canal.adapter.launcher.config;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.Resource;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.springframework.stereotype.Component;
+
+/**
+ * curator 配置类
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+@Component
+public class CuratorClient {
+
+    @Resource
+    private AdapterCanalConfig adapterCanalConfig;
+
+    private CuratorFramework   curator = null;
+
+    @PostConstruct
+    public void init() {
+        if (adapterCanalConfig.getZookeeperHosts() != null) {
+            curator = CuratorFrameworkFactory.builder()
+                .connectString(adapterCanalConfig.getZookeeperHosts())
+                .retryPolicy(new ExponentialBackoffRetry(1000, 3))
+                .sessionTimeoutMs(6000)
+                .connectionTimeoutMs(3000)
+                .namespace("canal-adapter")
+                .build();
+            curator.start();
+        }
+    }
+
+    public CuratorFramework getCurator() {
+        return curator;
+    }
+}

+ 34 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/config/SpringContext.java

@@ -0,0 +1,34 @@
+package com.alibaba.otter.canal.adapter.launcher.config;
+
+import org.springframework.beans.BeansException;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.stereotype.Component;
+
+/**
+ * spring util配置类
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+@Component
+public class SpringContext implements ApplicationContextAware {
+
+    private static ApplicationContext context;
+
+    /*
+     * 注入ApplicationContext
+     */
+    public void setApplicationContext(final ApplicationContext context) throws BeansException {
+        // 在加载Spring时自动获得context
+        SpringContext.context = context;
+    }
+
+    public static Object getBean(final String beanName) {
+        return SpringContext.context.getBean(beanName);
+    }
+
+    public static Object getBean(final Class<?> clz) {
+        return context.getBean(clz);
+    }
+}

+ 243 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/AbstractCanalAdapterWorker.java

@@ -0,0 +1,243 @@
+package com.alibaba.otter.canal.adapter.launcher.loader;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.otter.canal.adapter.launcher.common.SyncSwitch;
+import com.alibaba.otter.canal.adapter.launcher.config.SpringContext;
+import com.alibaba.otter.canal.client.CanalMQConnector;
+import com.alibaba.otter.canal.client.adapter.OuterAdapter;
+import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+import com.alibaba.otter.canal.client.adapter.support.MessageUtil;
+import com.alibaba.otter.canal.protocol.FlatMessage;
+import com.alibaba.otter.canal.protocol.Message;
+
+/**
+ * 适配器工作线程抽象类
+ *
+ * @author rewerma 2018-8-19 下午11:30:49
+ * @version 1.0.0
+ */
+public abstract class AbstractCanalAdapterWorker {
+
+    protected final Logger                    logger  = LoggerFactory.getLogger(this.getClass());
+
+    protected String                          canalDestination;                                                // canal实例
+    protected List<List<OuterAdapter>>        canalOuterAdapters;                                              // 外部适配器
+    protected CanalClientConfig               canalClientConfig;                                               // 配置
+    protected ExecutorService                 groupInnerExecutorService;                                       // 组内工作线程池
+    protected volatile boolean                running = false;                                                 // 是否运行中
+    protected Thread                          thread  = null;
+    protected Thread.UncaughtExceptionHandler handler = (t, e) -> logger.error("parse events has an error", e);
+
+    protected SyncSwitch                      syncSwitch;
+
+    public AbstractCanalAdapterWorker(List<List<OuterAdapter>> canalOuterAdapters){
+        this.canalOuterAdapters = canalOuterAdapters;
+        this.groupInnerExecutorService = Executors.newFixedThreadPool(canalOuterAdapters.size());
+        syncSwitch = (SyncSwitch) SpringContext.getBean(SyncSwitch.class);
+    }
+
+    protected void writeOut(final Message message) {
+        List<Future<Boolean>> futures = new ArrayList<>();
+        // 组间适配器并行运行
+        canalOuterAdapters.forEach(outerAdapters -> {
+            final List<OuterAdapter> adapters = outerAdapters;
+            futures.add(groupInnerExecutorService.submit(() -> {
+                try {
+                    // 组内适配器穿行运行,尽量不要配置组内适配器
+                    adapters.forEach(adapter -> {
+                        long begin = System.currentTimeMillis();
+                        List<Dml> dmls = MessageUtil.parse4Dml(canalDestination, message);
+                        if (dmls != null) {
+                            batchSync(dmls, adapter);
+
+                            if (logger.isDebugEnabled()) {
+                                logger.debug("{} elapsed time: {}",
+                                    adapter.getClass().getName(),
+                                    (System.currentTimeMillis() - begin));
+                            }
+                        }
+                    });
+                    return true;
+                } catch (Exception e) {
+                    return false;
+                }
+            }));
+
+            // 等待所有适配器写入完成
+            // 由于是组间并发操作,所以将阻塞直到耗时最久的工作组操作完成
+            futures.forEach(future -> {
+                try {
+                    if (!future.get()) {
+                        logger.error("Outer adapter write failed");
+                    }
+                } catch (InterruptedException | ExecutionException e) {
+                    // ignore
+                }
+            });
+        });
+    }
+
+    private void writeOut(final List<FlatMessage> flatMessages) {
+        List<Future<Boolean>> futures = new ArrayList<>();
+        // 组间适配器并行运行
+        canalOuterAdapters.forEach(outerAdapters -> {
+            futures.add(groupInnerExecutorService.submit(() -> {
+                try {
+                    // 组内适配器穿行运行,尽量不要配置组内适配器
+                    outerAdapters.forEach(adapter -> {
+                        long begin = System.currentTimeMillis();
+                        List<Dml> dmls = MessageUtil.flatMessage2Dml(canalDestination, flatMessages);
+                        batchSync(dmls, adapter);
+
+                        if (logger.isDebugEnabled()) {
+                            logger.debug("{} elapsed time: {}",
+                                adapter.getClass().getName(),
+                                (System.currentTimeMillis() - begin));
+                        }
+                    });
+                    return true;
+                } catch (Exception e) {
+                    return false;
+                }
+            }));
+
+            // 等待所有适配器写入完成
+            // 由于是组间并发操作,所以将阻塞直到耗时最久的工作组操作完成
+            futures.forEach(future -> {
+                try {
+                    if (!future.get()) {
+                        logger.error("Outer adapter write failed");
+                    }
+                } catch (InterruptedException | ExecutionException e) {
+                    // ignore
+                }
+            });
+        });
+    }
+
+    @SuppressWarnings("unchecked")
+    protected void mqWriteOutData(int retry, long timeout, final boolean flatMessage, CanalMQConnector connector,
+                                  ExecutorService workerExecutor) {
+        for (int i = 0; i < retry; i++) {
+            try {
+                List<?> messages;
+                if (!flatMessage) {
+                    messages = connector.getListWithoutAck(100L, TimeUnit.MILLISECONDS);
+                } else {
+                    messages = connector.getFlatListWithoutAck(100L, TimeUnit.MILLISECONDS);
+                }
+                if (messages != null) {
+                    Future<Boolean> future = workerExecutor.submit(() -> {
+                        if (flatMessage) {
+                            // batch write
+                            writeOut((List<FlatMessage>) messages);
+                        } else {
+                            for (final Object message : messages) {
+                                writeOut((Message) message);
+                            }
+                        }
+                        return true;
+                    });
+
+                    try {
+                        future.get(timeout, TimeUnit.MILLISECONDS);
+                    } catch (Exception e) {
+                        future.cancel(true);
+                        throw e;
+                    }
+                }
+                connector.ack();
+                break;
+            } catch (Throwable e) {
+                if (i == retry - 1) {
+                    connector.ack();
+                } else {
+                    connector.rollback();
+                }
+
+                logger.error(e.getMessage(), e);
+                try {
+                    TimeUnit.SECONDS.sleep(1L);
+                } catch (InterruptedException e1) {
+                    // ignore
+                }
+            }
+        }
+    }
+
+    /**
+     * 分批同步
+     * 
+     * @param dmls
+     * @param adapter
+     */
+    private void batchSync(List<Dml> dmls, OuterAdapter adapter) {
+        // 分批同步
+        if (dmls.size() <= canalClientConfig.getSyncBatchSize()) {
+            adapter.sync(dmls);
+        } else {
+            int len = 0;
+            List<Dml> dmlsBatch = new ArrayList<>();
+            for (Dml dml : dmls) {
+                dmlsBatch.add(dml);
+                len += dml.getData().size();
+                if (len >= canalClientConfig.getSyncBatchSize()) {
+                    adapter.sync(dmlsBatch);
+                    dmlsBatch.clear();
+                    len = 0;
+                }
+            }
+            adapter.sync(dmlsBatch);
+        }
+    }
+
+    public void start() {
+        if (!running) {
+            thread = new Thread(this::process);
+            thread.setUncaughtExceptionHandler(handler);
+            thread.start();
+            running = true;
+        }
+    }
+
+    protected abstract void process();
+
+    public void stop() {
+        try {
+            if (!running) {
+                return;
+            }
+
+            running = false;
+
+            syncSwitch.release(canalDestination);
+
+            logger.info("destination {} is waiting for adapters' worker thread die!", canalDestination);
+            if (thread != null) {
+                try {
+                    thread.join();
+                } catch (InterruptedException e) {
+                    // ignore
+                }
+            }
+            groupInnerExecutorService.shutdown();
+            logger.info("destination {} adapters worker thread dead!", canalDestination);
+            canalOuterAdapters.forEach(outerAdapters -> outerAdapters.forEach(OuterAdapter::destroy));
+            logger.info("destination {} all adapters destroyed!", canalDestination);
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+}

+ 79 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterKafkaWorker.java

@@ -0,0 +1,79 @@
+package com.alibaba.otter.canal.adapter.launcher.loader;
+
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.kafka.common.errors.WakeupException;
+
+import com.alibaba.otter.canal.client.adapter.OuterAdapter;
+import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
+import com.alibaba.otter.canal.client.kafka.KafkaCanalConnector;
+
+/**
+ * kafka对应的client适配器工作线程
+ *
+ * @author rewerma 2018-8-19 下午11:30:49
+ * @version 1.0.0
+ */
+public class CanalAdapterKafkaWorker extends AbstractCanalAdapterWorker {
+
+    private KafkaCanalConnector connector;
+    private String              topic;
+    private boolean             flatMessage;
+
+    public CanalAdapterKafkaWorker(CanalClientConfig canalClientConfig, String bootstrapServers, String topic,
+                                   String groupId, List<List<OuterAdapter>> canalOuterAdapters, boolean flatMessage){
+        super(canalOuterAdapters);
+        this.canalClientConfig = canalClientConfig;
+        this.topic = topic;
+        this.canalDestination = topic;
+        this.flatMessage = flatMessage;
+        this.connector = new KafkaCanalConnector(bootstrapServers,
+            topic,
+            null,
+            groupId,
+            canalClientConfig.getBatchSize(),
+            flatMessage);
+        connector.setSessionTimeout(30L, TimeUnit.SECONDS);
+    }
+
+    @Override
+    protected void process() {
+        while (!running)
+            ;
+        ExecutorService workerExecutor = Executors.newSingleThreadExecutor();
+        int retry = canalClientConfig.getRetries() == null || canalClientConfig.getRetries() == 0 ? 1 : canalClientConfig.getRetries();
+        long timeout = canalClientConfig.getTimeout() == null ? 30000 : canalClientConfig.getTimeout(); // 默认超时30秒
+
+        while (running) {
+            try {
+                syncSwitch.get(canalDestination);
+                logger.info("=============> Start to connect topic: {} <=============", this.topic);
+                connector.connect();
+                logger.info("=============> Start to subscribe topic: {} <=============", this.topic);
+                connector.subscribe();
+                logger.info("=============> Subscribe topic: {} succeed <=============", this.topic);
+                while (running) {
+                    Boolean status = syncSwitch.status(canalDestination);
+                    if (status != null && !status) {
+                        connector.disconnect();
+                        break;
+                    }
+                    mqWriteOutData(retry, timeout, flatMessage, connector, workerExecutor);
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        try {
+            connector.unsubscribe();
+        } catch (WakeupException e) {
+            // No-op. Continue process
+        }
+        connector.disconnect();
+        logger.info("=============> Disconnect topic: {} <=============", this.topic);
+    }
+}

+ 198 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterLoader.java

@@ -0,0 +1,198 @@
+package com.alibaba.otter.canal.adapter.launcher.loader;
+
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.otter.canal.client.adapter.OuterAdapter;
+import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
+import com.alibaba.otter.canal.client.adapter.support.ExtensionLoader;
+import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig;
+
+/**
+ * 外部适配器的加载器
+ *
+ * @version 1.0.0
+ */
+public class CanalAdapterLoader {
+
+    private static final Logger                     logger        = LoggerFactory.getLogger(CanalAdapterLoader.class);
+
+    private CanalClientConfig                       canalClientConfig;
+
+    private Map<String, CanalAdapterWorker>         canalWorkers  = new HashMap<>();
+
+    private Map<String, AbstractCanalAdapterWorker> canalMQWorker = new HashMap<>();
+
+    private ExtensionLoader<OuterAdapter>           loader;
+
+    public CanalAdapterLoader(CanalClientConfig canalClientConfig){
+        this.canalClientConfig = canalClientConfig;
+    }
+
+    /**
+     * 初始化canal-client
+     */
+    public void init() {
+        loader = ExtensionLoader.getExtensionLoader(OuterAdapter.class);
+
+        String canalServerHost = this.canalClientConfig.getCanalServerHost();
+        SocketAddress sa = null;
+        if (canalServerHost != null) {
+            String[] ipPort = canalServerHost.split(":");
+            sa = new InetSocketAddress(ipPort[0], Integer.parseInt(ipPort[1]));
+        }
+        String zkHosts = this.canalClientConfig.getZookeeperHosts();
+
+        if ("tcp".equalsIgnoreCase(canalClientConfig.getMode())) {
+            // 初始化canal-client的适配器
+            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
+                List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
+
+                for (CanalClientConfig.Group connectorGroup : canalAdapter.getGroups()) {
+                    List<OuterAdapter> canalOutConnectors = new ArrayList<>();
+                    for (OuterAdapterConfig c : connectorGroup.getOuterAdapters()) {
+                        loadConnector(c, canalOutConnectors);
+                    }
+                    canalOuterAdapterGroups.add(canalOutConnectors);
+                }
+                CanalAdapterWorker worker;
+                if (sa != null) {
+                    worker = new CanalAdapterWorker(canalClientConfig,
+                        canalAdapter.getInstance(),
+                        sa,
+                        canalOuterAdapterGroups);
+                } else if (zkHosts != null) {
+                    worker = new CanalAdapterWorker(canalClientConfig,
+                        canalAdapter.getInstance(),
+                        zkHosts,
+                        canalOuterAdapterGroups);
+                } else {
+                    throw new RuntimeException("No canal server connector found");
+                }
+                canalWorkers.put(canalAdapter.getInstance(), worker);
+                worker.start();
+                logger.info("Start adapter for canal instance: {} succeed", canalAdapter.getInstance());
+            }
+        } else if ("kafka".equalsIgnoreCase(canalClientConfig.getMode())) {
+            // 初始化canal-client-kafka的适配器
+            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
+                for (CanalClientConfig.Group group : canalAdapter.getGroups()) {
+                    List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
+                    List<OuterAdapter> canalOuterAdapters = new ArrayList<>();
+                    for (OuterAdapterConfig config : group.getOuterAdapters()) {
+                        loadConnector(config, canalOuterAdapters);
+                    }
+                    canalOuterAdapterGroups.add(canalOuterAdapters);
+
+                    CanalAdapterKafkaWorker canalKafkaWorker = new CanalAdapterKafkaWorker(canalClientConfig,
+                        canalClientConfig.getMqServers(),
+                        canalAdapter.getInstance(),
+                        group.getGroupId(),
+                        canalOuterAdapterGroups,
+                        canalClientConfig.getFlatMessage());
+                    canalMQWorker.put(canalAdapter.getInstance() + "-kafka-" + group.getGroupId(), canalKafkaWorker);
+                    canalKafkaWorker.start();
+                    logger.info("Start adapter for canal-client mq topic: {} succeed", canalAdapter.getInstance() + "-"
+                                                                                       + group.getGroupId());
+                }
+            }
+        } else if ("rocketMQ".equalsIgnoreCase(canalClientConfig.getMode())) {
+            // 初始化canal-client-rocketMQ的适配器
+            for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) {
+                for (CanalClientConfig.Group group : canalAdapter.getGroups()) {
+                    List<List<OuterAdapter>> canalOuterAdapterGroups = new ArrayList<>();
+                    List<OuterAdapter> canalOuterAdapters = new ArrayList<>();
+                    for (OuterAdapterConfig config : group.getOuterAdapters()) {
+                        loadConnector(config, canalOuterAdapters);
+                    }
+                    canalOuterAdapterGroups.add(canalOuterAdapters);
+                    CanalAdapterRocketMQWorker rocketMQWorker = new CanalAdapterRocketMQWorker(canalClientConfig,
+                        canalClientConfig.getMqServers(),
+                        canalAdapter.getInstance(),
+                        group.getGroupId(),
+                        canalOuterAdapterGroups,
+                        canalClientConfig.getAccessKey(),
+                        canalClientConfig.getSecretKey(),
+                        canalClientConfig.getFlatMessage());
+                    canalMQWorker.put(canalAdapter.getInstance() + "-rocketmq-" + group.getGroupId(), rocketMQWorker);
+                    rocketMQWorker.start();
+
+                    logger.info("Start adapter for canal-client mq topic: {} succeed", canalAdapter.getInstance() + "-"
+                                                                                       + group.getGroupId());
+                }
+            }
+        }
+    }
+
+    private void loadConnector(OuterAdapterConfig config, List<OuterAdapter> canalOutConnectors) {
+        try {
+            OuterAdapter adapter;
+            adapter = loader.getExtension(config.getName(), StringUtils.trimToEmpty(config.getKey()));
+
+            ClassLoader cl = Thread.currentThread().getContextClassLoader();
+            // 替换ClassLoader
+            Thread.currentThread().setContextClassLoader(adapter.getClass().getClassLoader());
+            adapter.init(config);
+            Thread.currentThread().setContextClassLoader(cl);
+            canalOutConnectors.add(adapter);
+            logger.info("Load canal adapter: {} succeed", config.getName());
+        } catch (Exception e) {
+            logger.error("Load canal adapter: {} failed", config.getName(), e);
+        }
+    }
+
+    /**
+     * 销毁所有适配器 为防止canal实例太多造成销毁阻塞, 并行销毁
+     */
+    public void destroy() {
+        if (!canalWorkers.isEmpty()) {
+            ExecutorService stopExecutorService = Executors.newFixedThreadPool(canalWorkers.size());
+            List<Future<Boolean>> futures = new ArrayList<>();
+            for (CanalAdapterWorker canalAdapterWorker : canalWorkers.values()) {
+                futures.add(stopExecutorService.submit(() -> {
+                    canalAdapterWorker.stop();
+                    return true;
+                }));
+            }
+            futures.forEach(future -> {
+                try {
+                    future.get();
+                } catch (Exception e) {
+                    // ignore
+                }
+            });
+            stopExecutorService.shutdown();
+        }
+
+        if (!canalMQWorker.isEmpty()) {
+            ExecutorService stopMQWorkerService = Executors.newFixedThreadPool(canalMQWorker.size());
+            List<Future<Boolean>> futures = new ArrayList<>();
+            for (AbstractCanalAdapterWorker canalAdapterMQWorker : canalMQWorker.values()) {
+                futures.add(stopMQWorkerService.submit(() -> {
+                    canalAdapterMQWorker.stop();
+                    return true;
+                }));
+            }
+            futures.forEach(future -> {
+                try {
+                    future.get();
+                } catch (Exception e) {
+                    // ignore
+                }
+            });
+            stopMQWorkerService.shutdown();
+        }
+        logger.info("All canal adapters destroyed");
+    }
+}

+ 74 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterRocketMQWorker.java

@@ -0,0 +1,74 @@
+package com.alibaba.otter.canal.adapter.launcher.loader;
+
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import org.apache.kafka.common.errors.WakeupException;
+
+import com.alibaba.otter.canal.client.adapter.OuterAdapter;
+import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
+import com.alibaba.otter.canal.client.rocketmq.RocketMQCanalConnector;
+
+/**
+ * Client adapter worker thread for a RocketMQ source. Subscribes to a single
+ * topic and hands each message batch to the outer adapter groups via
+ * {@code mqWriteOutData}.
+ *
+ * @version 1.0.0
+ */
+public class CanalAdapterRocketMQWorker extends AbstractCanalAdapterWorker {
+
+    private final RocketMQCanalConnector connector;
+    private final String                 topic;
+    private final boolean                flatMessage;
+
+    public CanalAdapterRocketMQWorker(CanalClientConfig canalClientConfig, String nameServers, String topic,
+                                      String groupId, List<List<OuterAdapter>> canalOuterAdapters, String accessKey,
+                                      String secretKey, boolean flatMessage){
+        super(canalOuterAdapters);
+        this.canalClientConfig = canalClientConfig;
+        this.topic = topic;
+        this.flatMessage = flatMessage;
+        // In MQ mode the canal destination is the topic name
+        this.canalDestination = topic;
+        this.connector = new RocketMQCanalConnector(nameServers, topic, groupId, accessKey, secretKey, flatMessage);
+        logger.info("RocketMQ consumer config topic:{}, nameServer:{}, groupId:{}", topic, nameServers, groupId);
+    }
+
+    @Override
+    protected void process() {
+        // Spin until start() flips the running flag (same pattern as the other workers)
+        while (!running)
+            ;
+
+        ExecutorService workerExecutor = Executors.newSingleThreadExecutor();
+        // retries == null or 0 is treated as "try once"
+        int retry = canalClientConfig.getRetries() == null || canalClientConfig.getRetries() == 0 ? 1 : canalClientConfig.getRetries();
+        long timeout = canalClientConfig.getTimeout() == null ? 30000 : canalClientConfig.getTimeout(); // default timeout: 30 seconds
+
+        while (running) {
+            try {
+                // Blocks here while the sync switch for this destination is off
+                syncSwitch.get(canalDestination);
+                logger.info("=============> Start to connect topic: {} <=============", this.topic);
+                connector.connect();
+                logger.info("=============> Start to subscribe topic: {}<=============", this.topic);
+                connector.subscribe();
+                logger.info("=============> Subscribe topic: {} succeed<=============", this.topic);
+                while (running) {
+                    Boolean status = syncSwitch.status(canalDestination);
+                    if (status != null && !status) {
+                        // Switch was turned off: drop the connection and wait on the switch again
+                        connector.disconnect();
+                        break;
+                    }
+                    mqWriteOutData(retry, timeout, flatMessage, connector, workerExecutor);
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        try {
+            connector.unsubscribe();
+        } catch (WakeupException e) {
+            // No-op. Continue process
+        }
+        connector.disconnect();
+        logger.info("=============> Disconnect topic: {} <=============", this.topic);
+        // Fix: release the single worker thread on exit; previously this
+        // executor was never shut down and leaked a non-daemon thread
+        // (CanalAdapterWorker already shuts its executor down the same way).
+        workerExecutor.shutdown();
+    }
+}

+ 85 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterService.java

@@ -0,0 +1,85 @@
+package com.alibaba.otter.canal.adapter.launcher.loader;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+import javax.annotation.Resource;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.cloud.context.config.annotation.RefreshScope;
+import org.springframework.stereotype.Component;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.otter.canal.adapter.launcher.common.SyncSwitch;
+import com.alibaba.otter.canal.adapter.launcher.config.AdapterCanalConfig;
+import com.alibaba.otter.canal.adapter.launcher.config.SpringContext;
+import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig;
+
+/**
+ * Adapter start-up/shutdown service. Bound to the Spring bean lifecycle:
+ * {@link #init()} builds and starts the {@link CanalAdapterLoader} after
+ * construction, and {@link #destroy()} tears it down (including all pooled
+ * data sources) before the context closes or a config refresh.
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+@Component
+@RefreshScope
+public class CanalAdapterService {
+
+    private static final Logger logger  = LoggerFactory.getLogger(CanalAdapterService.class);
+
+    private CanalAdapterLoader  adapterLoader;
+
+    @Resource
+    private AdapterCanalConfig  adapterCanalConfig;
+
+    // Injected to guarantee these beans are registered before this service starts
+    @Resource
+    private SpringContext       springContext;
+    @Resource
+    private SyncSwitch          syncSwitch;
+
+    private volatile boolean    running = false;
+
+    @PostConstruct
+    public synchronized void init() {
+        if (running) {
+            // already started (e.g. re-entered after a config refresh)
+            return;
+        }
+        try {
+            logger.info("## start the canal client adapters.");
+            adapterLoader = new CanalAdapterLoader(adapterCanalConfig);
+            adapterLoader.init();
+            running = true;
+            logger.info("## the canal client adapters are running now ......");
+        } catch (Exception e) {
+            logger.error("## something goes wrong when starting up the canal client adapters:", e);
+        }
+    }
+
+    @PreDestroy
+    public synchronized void destroy() {
+        if (!running) {
+            return;
+        }
+        try {
+            running = false;
+            logger.info("## stop the canal client adapters");
+            if (adapterLoader != null) {
+                adapterLoader.destroy();
+                adapterLoader = null;
+            }
+            closeAllDataSources();
+        } catch (Throwable e) {
+            logger.warn("## something goes wrong when stopping canal client adapters:", e);
+        } finally {
+            logger.info("## canal client adapters are down.");
+        }
+    }
+
+    /** Closes every registered Druid pool, then empties the registry. */
+    private void closeAllDataSources() {
+        for (DruidDataSource dataSource : DatasourceConfig.DATA_SOURCES.values()) {
+            try {
+                dataSource.close();
+            } catch (Exception e) {
+                // keep closing the remaining pools even if one fails
+                logger.error(e.getMessage(), e);
+            }
+        }
+        DatasourceConfig.DATA_SOURCES.clear();
+    }
+}

+ 191 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterWorker.java

@@ -0,0 +1,191 @@
+package com.alibaba.otter.canal.adapter.launcher.loader;
+
+import java.net.SocketAddress;
+import java.util.List;
+import java.util.concurrent.*;
+
+import com.alibaba.otter.canal.client.CanalConnector;
+import com.alibaba.otter.canal.client.CanalConnectors;
+import com.alibaba.otter.canal.client.adapter.OuterAdapter;
+import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig;
+import com.alibaba.otter.canal.client.impl.ClusterCanalConnector;
+import com.alibaba.otter.canal.client.impl.SimpleCanalConnector;
+import com.alibaba.otter.canal.protocol.Message;
+
+/**
+ * Client adapter worker thread for a native canal-server (TCP) source.
+ * Pulls batches with getWithoutAck, dispatches them to the outer adapters,
+ * and acks/rolls back depending on the outcome.
+ *
+ * @author rewrema 2018-8-19 下午11:30:49
+ * @version 1.0.0
+ */
+public class CanalAdapterWorker extends AbstractCanalAdapterWorker {
+
+    private static final int  BATCH_SIZE = 50;
+    private static final int  SO_TIMEOUT = 0;
+
+    private CanalConnector    connector;
+
+    /**
+     * Constructor for a worker against a single canal-server instance.
+     *
+     * @param canalDestination canal instance name
+     * @param address canal-server address
+     * @param canalOuterAdapters outer adapter groups
+     */
+    public CanalAdapterWorker(CanalClientConfig canalClientConfig, String canalDestination, SocketAddress address,
+                              List<List<OuterAdapter>> canalOuterAdapters){
+        super(canalOuterAdapters);
+        this.canalClientConfig = canalClientConfig;
+        this.canalDestination = canalDestination;
+        connector = CanalConnectors.newSingleConnector(address, canalDestination, "", "");
+    }
+
+    /**
+     * Constructor for a worker in HA mode (server discovered via zookeeper).
+     *
+     * @param canalDestination canal instance name
+     * @param zookeeperHosts zookeeper address
+     * @param canalOuterAdapters outer adapter groups
+     */
+    public CanalAdapterWorker(CanalClientConfig canalClientConfig, String canalDestination, String zookeeperHosts,
+                              List<List<OuterAdapter>> canalOuterAdapters){
+        super(canalOuterAdapters);
+        this.canalDestination = canalDestination;
+        this.canalClientConfig = canalClientConfig;
+        connector = CanalConnectors.newClusterConnector(zookeeperHosts, canalDestination, "", "");
+        ((ClusterCanalConnector) connector).setSoTimeout(SO_TIMEOUT);
+    }
+
+    @Override
+    protected void process() {
+        while (!running)
+            ; // waiting until running == true
+
+        // single thread so batches for one destination are applied in order
+        ExecutorService workerExecutor = Executors.newSingleThreadExecutor();
+        // retries == null or 0 is treated as "try once"
+        int retry = canalClientConfig.getRetries() == null
+                    || canalClientConfig.getRetries() == 0 ? 1 : canalClientConfig.getRetries();
+        long timeout = canalClientConfig.getTimeout() == null ? 300000 : canalClientConfig.getTimeout(); // default timeout: 5 minutes
+        Integer batchSize = canalClientConfig.getBatchSize();
+        if (batchSize == null) {
+            batchSize = BATCH_SIZE;
+        }
+
+        while (running) {
+            try {
+                // block while the sync switch for this destination is off
+                syncSwitch.get(canalDestination);
+
+                logger.info("=============> Start to connect destination: {} <=============", this.canalDestination);
+                connector.connect();
+                logger.info("=============> Start to subscribe destination: {} <=============", this.canalDestination);
+                connector.subscribe();
+                logger.info("=============> Subscribe destination: {} succeed <=============", this.canalDestination);
+                while (running) {
+                    try {
+                        // re-check the switch at most once a minute; on timeout,
+                        // drop to the outer loop and reconnect
+                        syncSwitch.get(canalDestination, 1L, TimeUnit.MINUTES);
+                    } catch (TimeoutException e) {
+                        break;
+                    }
+                    if (!running) {
+                        break;
+                    }
+
+                    for (int i = 0; i < retry; i++) {
+                        Message message = connector.getWithoutAck(batchSize); // fetch up to batchSize entries without ack
+                        long batchId = message.getId();
+                        try {
+                            int size = message.getEntries().size();
+                            if (batchId == -1 || size == 0) {
+                                // nothing to consume; back off briefly
+                                Thread.sleep(500);
+                            } else {
+                                // run the write on the worker thread so it can be
+                                // cancelled if it exceeds the configured timeout
+                                Future<Boolean> future = workerExecutor.submit(() -> {
+                                    if (logger.isDebugEnabled()) {
+                                        logger.debug("destination: {} batchId: {} batchSize: {} ",
+                                            canalDestination,
+                                            batchId,
+                                            size);
+                                    }
+                                    long begin = System.currentTimeMillis();
+                                    writeOut(message);
+                                    if (logger.isDebugEnabled()) {
+                                        logger.debug("destination: {} batchId: {} elapsed time: {} ms",
+                                            canalDestination,
+                                            batchId,
+                                            System.currentTimeMillis() - begin);
+                                    }
+                                    return true;
+                                });
+
+                                try {
+                                    future.get(timeout, TimeUnit.MILLISECONDS);
+                                } catch (Exception e) {
+                                    future.cancel(true);
+                                    throw e;
+                                }
+                            }
+                            connector.ack(batchId); // commit ack
+                            break;
+                        } catch (Exception e) {
+                            if (i != retry - 1) {
+                                connector.rollback(batchId); // processing failed, roll back for retry
+                            } else {
+                                // retries exhausted: ack anyway so the batch is skipped
+                                connector.ack(batchId);
+                            }
+                            logger.error("sync error!", e);
+                            Thread.sleep(500);
+                        }
+                    }
+                }
+
+            } catch (Exception e) {
+                logger.error("process error!", e);
+            } finally {
+                connector.disconnect();
+                logger.info("=============> Disconnect destination: {} <=============", this.canalDestination);
+            }
+
+            if (running) { // is reconnect
+                try {
+                    Thread.sleep(1000);
+                } catch (InterruptedException e) {
+                    // ignore
+                }
+            }
+        }
+
+        workerExecutor.shutdown();
+    }
+
+    @Override
+    public void stop() {
+        try {
+            if (!running) {
+                return;
+            }
+
+            // stop the connector first so any blocking get returns
+            if (connector instanceof ClusterCanalConnector) {
+                ((ClusterCanalConnector) connector).stopRunning();
+            } else if (connector instanceof SimpleCanalConnector) {
+                ((SimpleCanalConnector) connector).stopRunning();
+            }
+
+            running = false;
+
+            // wake the worker if it is blocked on the sync switch
+            syncSwitch.release(canalDestination);
+
+            logger.info("destination {} is waiting for adapters' worker thread die!", canalDestination);
+            if (thread != null) {
+                try {
+                    thread.join();
+                } catch (InterruptedException e) {
+                    // ignore
+                }
+            }
+            groupInnerExecutorService.shutdown();
+            logger.info("destination {} adapters worker thread dead!", canalDestination);
+            canalOuterAdapters.forEach(outerAdapters -> outerAdapters.forEach(OuterAdapter::destroy));
+            logger.info("destination {} all adapters destroyed!", canalDestination);
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+}

+ 90 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/monitor/ApplicationConfigMonitor.java

@@ -0,0 +1,90 @@
+package com.alibaba.otter.canal.adapter.launcher.monitor;
+
+import java.io.File;
+import java.io.FileReader;
+import java.util.Map;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+import javax.annotation.Resource;
+
+import org.apache.commons.io.filefilter.FileFilterUtils;
+import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
+import org.apache.commons.io.monitor.FileAlterationMonitor;
+import org.apache.commons.io.monitor.FileAlterationObserver;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.cloud.context.refresh.ContextRefresher;
+import org.springframework.stereotype.Component;
+import org.yaml.snakeyaml.Yaml;
+
+import com.alibaba.otter.canal.adapter.launcher.loader.CanalAdapterService;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+
+@Component
+public class ApplicationConfigMonitor {
+
+    private static final Logger   logger = LoggerFactory.getLogger(ApplicationConfigMonitor.class);
+
+    @Resource
+    private ContextRefresher      contextRefresher;
+
+    @Resource
+    private CanalAdapterService   canalAdapterService;
+
+    private FileAlterationMonitor fileMonitor;
+
+    @PostConstruct
+    public void init() {
+        File confDir = Util.getConfDirPath();
+        try {
+            FileAlterationObserver observer = new FileAlterationObserver(confDir,
+                FileFilterUtils.and(FileFilterUtils.fileFileFilter(),
+                    FileFilterUtils.prefixFileFilter("application"),
+                    FileFilterUtils.suffixFileFilter("yml")));
+            FileListener listener = new FileListener();
+            observer.addListener(listener);
+            fileMonitor = new FileAlterationMonitor(3000, observer);
+            fileMonitor.start();
+
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    @PreDestroy
+    public void destroy() {
+        try {
+            fileMonitor.stop();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    private class FileListener extends FileAlterationListenerAdaptor {
+
+        @Override
+        public void onFileChange(File file) {
+            super.onFileChange(file);
+            try {
+                // 检查yml格式
+                new Yaml().loadAs(new FileReader(file), Map.class);
+
+                canalAdapterService.destroy();
+
+                // refresh context
+                contextRefresher.refresh();
+
+                try {
+                    Thread.sleep(2000);
+                } catch (InterruptedException e) {
+                    // ignore
+                }
+                canalAdapterService.init();
+                logger.info("## adapter application config reloaded.");
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+    }
+}

+ 209 - 0
client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/rest/CommonRest.java

@@ -0,0 +1,209 @@
+package com.alibaba.otter.canal.adapter.launcher.rest;
+
+import java.util.*;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.Resource;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.web.bind.annotation.*;
+
+import com.alibaba.otter.canal.adapter.launcher.common.EtlLock;
+import com.alibaba.otter.canal.adapter.launcher.common.SyncSwitch;
+import com.alibaba.otter.canal.adapter.launcher.config.AdapterCanalConfig;
+import com.alibaba.otter.canal.client.adapter.OuterAdapter;
+import com.alibaba.otter.canal.client.adapter.support.EtlResult;
+import com.alibaba.otter.canal.client.adapter.support.ExtensionLoader;
+import com.alibaba.otter.canal.client.adapter.support.Result;
+
+/**
+ * REST operations for the adapters: full ETL import, row counting, listing
+ * destinations and toggling/querying the per-destination sync switch.
+ *
+ * @author rewerma @ 2018-10-20
+ * @version 1.0.0
+ */
+@RestController
+public class CommonRest {
+
+    private static final Logger           logger           = LoggerFactory.getLogger(CommonRest.class);
+
+    private static final String           ETL_LOCK_ZK_NODE = "/sync-etl/";
+
+    private ExtensionLoader<OuterAdapter> loader;
+
+    @Resource
+    private SyncSwitch                    syncSwitch;
+    @Resource
+    private EtlLock                       etlLock;
+
+    @Resource
+    private AdapterCanalConfig            adapterCanalConfig;
+
+    @PostConstruct
+    public void init() {
+        loader = ExtensionLoader.getExtensionLoader(OuterAdapter.class);
+    }
+
+    /**
+     * ETL curl http://127.0.0.1:8081/etl/rdb/oracle1/mytest_user.yml -X POST
+     *
+     * @param type adapter type: hbase, es, rdb ...
+     * @param key adapter key
+     * @param task task name, i.e. the mapping config file name, e.g. mytest_user.yml
+     * @param params etl WHERE-clause parameters; empty imports everything
+     */
+    @PostMapping("/etl/{type}/{key}/{task}")
+    public EtlResult etl(@PathVariable String type, @PathVariable String key, @PathVariable String task,
+                         @RequestParam(name = "params", required = false) String params) {
+        OuterAdapter adapter = loader.getExtension(type, key);
+        String destination = adapter.getDestination(task);
+        String lockKey = destination == null ? task : destination;
+
+        // only one ETL per destination/task may run at a time
+        boolean locked = etlLock.tryLock(ETL_LOCK_ZK_NODE + type + "-" + lockKey);
+        if (!locked) {
+            EtlResult result = new EtlResult();
+            result.setSucceeded(false);
+            result.setErrorMessage(task + " 有其他进程正在导入中, 请稍后再试");
+            return result;
+        }
+        try {
+
+            // pause incremental sync during the import, then restore the
+            // original switch state afterwards
+            Boolean oriSwitchStatus;
+            if (destination != null) {
+                oriSwitchStatus = syncSwitch.status(destination);
+                if (oriSwitchStatus != null && oriSwitchStatus) {
+                    syncSwitch.off(destination);
+                }
+            } else {
+                // task itself may be a destination; lock the task name directly
+                oriSwitchStatus = syncSwitch.status(task);
+                if (oriSwitchStatus != null && oriSwitchStatus) {
+                    syncSwitch.off(task);
+                }
+            }
+            try {
+                List<String> paramArray = null;
+                if (params != null) {
+                    paramArray = Arrays.asList(params.trim().split(";"));
+                }
+                return adapter.etl(task, paramArray);
+            } finally {
+                if (destination != null && oriSwitchStatus != null && oriSwitchStatus) {
+                    syncSwitch.on(destination);
+                } else if (destination == null && oriSwitchStatus != null && oriSwitchStatus) {
+                    syncSwitch.on(task);
+                }
+            }
+        } finally {
+            etlLock.unlock(ETL_LOCK_ZK_NODE + type + "-" + lockKey);
+        }
+    }
+
+    /**
+     * ETL curl http://127.0.0.1:8081/etl/hbase/mytest_person2.yml -X POST
+     * 
+     * @param type adapter type: hbase, es, rdb ...
+     * @param task task name, i.e. the mapping config file name, e.g. mytest_person2.yml
+     * @param params etl WHERE-clause parameters; empty imports everything
+     */
+    @PostMapping("/etl/{type}/{task}")
+    public EtlResult etl(@PathVariable String type, @PathVariable String task,
+                         @RequestParam(name = "params", required = false) String params) {
+        return etl(type, null, task, params);
+    }
+
+    /**
+     * Row count: curl http://127.0.0.1:8081/count/rdb/oracle1/mytest_user.yml
+     *
+     * @param type adapter type: hbase, es, rdb ...
+     * @param key adapter key
+     * @param task task name, i.e. the mapping config file name
+     * @return count result map from the adapter
+     */
+    @GetMapping("/count/{type}/{key}/{task}")
+    public Map<String, Object> count(@PathVariable String type, @PathVariable String key, @PathVariable String task) {
+        OuterAdapter adapter = loader.getExtension(type, key);
+        return adapter.count(task);
+    }
+
+    /**
+     * Row count: curl http://127.0.0.1:8081/count/hbase/mytest_person2.yml
+     * 
+     * @param type adapter type: hbase, es, rdb ...
+     * @param task task name, i.e. the mapping config file name
+     * @return count result map from the adapter
+     */
+    @GetMapping("/count/{type}/{task}")
+    public Map<String, Object> count(@PathVariable String type, @PathVariable String task) {
+        return count(type, null, task);
+    }
+
+    /**
+     * List all destinations: curl http://127.0.0.1:8081/destinations
+     */
+    @GetMapping("/destinations")
+    public List<Map<String, String>> destinations() {
+        List<Map<String, String>> result = new ArrayList<>();
+        Set<String> destinations = adapterCanalConfig.DESTINATIONS;
+        for (String destination : destinations) {
+            Map<String, String> resMap = new LinkedHashMap<>();
+            Boolean status = syncSwitch.status(destination);
+            String resStatus = "none";
+            if (status != null && status) {
+                resStatus = "on";
+            } else if (status != null && !status) {
+                resStatus = "off";
+            }
+            resMap.put("destination", destination);
+            resMap.put("status", resStatus);
+            result.add(resMap);
+        }
+        return result;
+    }
+
+    /**
+     * Toggle a destination's sync switch:
+     * curl http://127.0.0.1:8081/syncSwitch/example/off -X PUT
+     * 
+     * @param destination instance name
+     * @param status switch state: off / on
+     * @return operation result
+     */
+    @PutMapping("/syncSwitch/{destination}/{status}")
+    public Result etl(@PathVariable String destination, @PathVariable String status) {
+        if (status.equals("on")) {
+            syncSwitch.on(destination);
+            logger.info("#Destination: {} sync on", destination);
+            return Result.createSuccess("实例: " + destination + " 开启同步成功");
+        } else if (status.equals("off")) {
+            syncSwitch.off(destination);
+            logger.info("#Destination: {} sync off", destination);
+            return Result.createSuccess("实例: " + destination + " 关闭同步成功");
+        } else {
+            Result result = new Result();
+            result.setCode(50000);
+            result.setMessage("实例: " + destination + " 操作失败");
+            return result;
+        }
+    }
+
+    /**
+     * Query a destination's sync switch state:
+     * curl http://127.0.0.1:8081/syncSwitch/example
+     * 
+     * @param destination instance name
+     * @return map with the switch state: on / off / none
+     */
+    @GetMapping("/syncSwitch/{destination}")
+    public Map<String, String> etl(@PathVariable String destination) {
+        Boolean status = syncSwitch.status(destination);
+        String resStatus = "none";
+        if (status != null && status) {
+            resStatus = "on";
+        } else if (status != null && !status) {
+            resStatus = "off";
+        }
+        Map<String, String> res = new LinkedHashMap<>();
+        // Fix: correctly spelled key; the original only emitted "stauts".
+        res.put("status", resStatus);
+        // keep the misspelled key too so existing clients don't break
+        res.put("stauts", resStatus);
+        return res;
+    }
+}

+ 62 - 0
client-adapter/launcher/src/main/resources/application.yml

@@ -0,0 +1,62 @@
+server:
+  port: 8081
+logging:
+  level:
+    org.springframework: WARN
+    com.alibaba.otter.canal.client.adapter.hbase: DEBUG
+    com.alibaba.otter.canal.client.adapter.es: DEBUG
+    com.alibaba.otter.canal.client.adapter.rdb: DEBUG
+spring:
+  jackson:
+    date-format: yyyy-MM-dd HH:mm:ss
+    time-zone: GMT+8
+    default-property-inclusion: non_null
+
+canal.conf:
+  canalServerHost: 127.0.0.1:11111
+#  zookeeperHosts: slave1:2181
+#  mqServers: slave1:6667 #or rocketmq
+#  flatMessage: true
+  batchSize: 500
+  syncBatchSize: 1000
+  retries: 0
+  timeout:
+  accessKey:
+  secretKey:
+  mode: tcp # kafka rocketMQ
+#  srcDataSources:
+#    defaultDS:
+#      url: jdbc:mysql://127.0.0.1:3306/mytest?useUnicode=true
+#      username: root
+#      password: 121212
+  canalAdapters:
+  - instance: example # canal instance Name or mq topic name
+    groups:
+    - groupId: g1
+      outerAdapters:
+      - name: logger
+#      - name: rdb
+#        key: oracle1
+#        properties:
+#          jdbc.driverClassName: oracle.jdbc.OracleDriver
+#          jdbc.url: jdbc:oracle:thin:@localhost:49161:XE
+#          jdbc.username: mytest
+#          jdbc.password: m121212
+#      - name: rdb
+#        key: postgres1
+#        properties:
+#          jdbc.driverClassName: org.postgresql.Driver
+#          jdbc.url: jdbc:postgresql://localhost:5432/postgres
+#          jdbc.username: postgres
+#          jdbc.password: 121212
+#          threads: 1
+#          commitSize: 3000
+#      - name: hbase
+#        properties:
+#          hbase.zookeeper.quorum: 127.0.0.1
+#          hbase.zookeeper.property.clientPort: 2181
+#          zookeeper.znode.parent: /hbase
+#      - name: es
+#        hosts: 127.0.0.1:9300
+#        properties:
+#          cluster.name: elasticsearch

+ 46 - 0
client-adapter/logger/pom.xml

@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>canal.client-adapter</artifactId>
+        <groupId>com.alibaba.otter</groupId>
+        <version>1.1.3-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>com.alibaba.otter</groupId>
+    <artifactId>client-adapter.logger</artifactId>
+    <packaging>jar</packaging>
+    <name>canal client adapter logger example module for otter ${project.version}</name>
+    <dependencies>
+        <dependency>
+            <groupId>com.alibaba.otter</groupId>
+            <artifactId>client-adapter.common</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.4</version>
+                <configuration>
+                    <descriptorRefs>
+                        <descriptorRef>jar-with-dependencies</descriptorRef>
+                    </descriptorRefs>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>

+ 46 - 0
client-adapter/logger/src/main/java/com/alibaba/otter/canal/client/adapter/logger/LoggerAdapterExample.java

@@ -0,0 +1,46 @@
+package com.alibaba.otter.canal.client.adapter.logger;
+
+import java.util.List;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.serializer.SerializerFeature;
+import com.alibaba.otter.canal.client.adapter.OuterAdapter;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig;
+import com.alibaba.otter.canal.client.adapter.support.SPI;
+
+/**
+ * Example outer adapter: logs every incoming DML as a JSON string.
+ *
+ * @author machengyuan 2018-8-19 下午11:45:38
+ * @version 1.0.0
+ */
+@SPI("logger")
+// the "logger" value corresponds to the adapter name in the CanalOuterAdapterConfiguration config
+public class LoggerAdapterExample implements OuterAdapter {
+
+    private final Logger logger = LoggerFactory.getLogger(this.getClass());
+
+    @Override
+    public void init(OuterAdapterConfig configuration) {
+        // nothing to initialize for the logging example
+    }
+
+    public void sync(List<Dml> dmls) {
+        // delegate each entry to the single-DML overload
+        dmls.forEach(this::sync);
+    }
+
+    public void sync(Dml dml) {
+        logger.info("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue));
+    }
+
+    @Override
+    public void destroy() {
+        // no resources to release
+    }
+}

+ 1 - 0
client-adapter/logger/src/main/resources/META-INF/canal/com.alibaba.otter.canal.client.adapter.OuterAdapter

@@ -0,0 +1 @@
+logger=com.alibaba.otter.canal.client.adapter.logger.LoggerAdapterExample

+ 128 - 0
client-adapter/pom.xml

@@ -0,0 +1,128 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Parent POM of the canal client-adapter multi-module build. -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>com.alibaba.otter</groupId>
+    <artifactId>canal.client-adapter</artifactId>
+    <version>1.1.3-SNAPSHOT</version>
+    <packaging>pom</packaging>
+    <name>canal client adapter module for otter ${project.version}</name>
+    <parent>
+        <groupId>com.alibaba.otter</groupId>
+        <artifactId>canal</artifactId>
+        <version>1.1.3-SNAPSHOT</version>
+    </parent>
+
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <maven.test.skip>true</maven.test.skip>
+        <downloadSources>true</downloadSources>
+        <java_source_version>1.8</java_source_version>
+        <java_target_version>1.8</java_target_version>
+        <file_encoding>UTF-8</file_encoding>
+    </properties>
+
+    <modules>
+        <module>common</module>
+        <module>logger</module>
+        <module>hbase</module>
+        <module>elasticsearch</module>
+        <module>launcher</module>
+        <module>rdb</module>
+    </modules>
+
+    <licenses>
+        <license>
+            <name>Apache License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0</url>
+        </license>
+    </licenses>
+
+    <scm>
+        <url>git@github.com:alibaba/canal.git</url>
+        <connection>scm:git:git@github.com:alibaba/canal.git</connection>
+        <developerConnection>scm:git:git@github.com:alibaba/canal.git</developerConnection>
+    </scm>
+
+    <repositories>
+        <!-- Repository URLs use https: Maven Central and mirrors reject plain
+             http (HTTP 501 since January 2020), and https prevents dependency
+             tampering in transit. -->
+        <repository>
+            <id>central</id>
+            <url>https://repo1.maven.org/maven2</url>
+            <releases>
+                <enabled>true</enabled>
+            </releases>
+            <snapshots>
+                <enabled>false</enabled>
+            </snapshots>
+        </repository>
+        <repository>
+            <id>java.net</id>
+            <url>https://download.java.net/maven/2/</url>
+            <releases>
+                <enabled>true</enabled>
+            </releases>
+            <snapshots>
+                <enabled>false</enabled>
+            </snapshots>
+        </repository>
+        <repository>
+            <id>aliyun</id>
+            <url>https://maven.aliyun.com/nexus/content/groups/public/</url>
+            <releases>
+                <enabled>true</enabled>
+            </releases>
+            <snapshots>
+                <enabled>false</enabled>
+            </snapshots>
+        </repository>
+        <repository>
+            <id>sonatype</id>
+            <name>sonatype</name>
+            <url>https://oss.sonatype.org/content/repositories/snapshots</url>
+            <releases>
+                <enabled>false</enabled>
+            </releases>
+            <snapshots>
+                <enabled>true</enabled>
+            </snapshots>
+        </repository>
+        <repository>
+            <id>sonatype-release</id>
+            <name>sonatype-release</name>
+            <url>https://oss.sonatype.org/service/local/repositories/releases/content</url>
+            <releases>
+                <enabled>false</enabled>
+            </releases>
+            <snapshots>
+                <enabled>true</enabled>
+            </snapshots>
+        </repository>
+    </repositories>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.7.0</version>
+                <configuration>
+                    <source>${java_source_version}</source>
+                    <target>${java_target_version}</target>
+                    <encoding>${file_encoding}</encoding>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+
+    <distributionManagement>
+        <snapshotRepository>
+            <id>sonatype-nexus-snapshots</id>
+            <name>Sonatype Nexus Snapshots</name>
+            <url>https://oss.sonatype.org/content/repositories/snapshots/</url>
+        </snapshotRepository>
+        <repository>
+            <id>sonatype-nexus-staging</id>
+            <name>Nexus Release Repository</name>
+            <url>https://oss.sonatype.org/service/local/staging/deploy/maven2/</url>
+        </repository>
+    </distributionManagement>
+</project>

+ 97 - 0
client-adapter/rdb/pom.xml

@@ -0,0 +1,97 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- RDB adapter module: syncs canal DML events into relational databases. -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>canal.client-adapter</artifactId>
+        <groupId>com.alibaba.otter</groupId>
+        <version>1.1.3-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>com.alibaba.otter</groupId>
+    <artifactId>client-adapter.rdb</artifactId>
+    <packaging>jar</packaging>
+    <name>canal client adapter rdb module for otter ${project.version}</name>
+
+    <dependencies>
+        <!-- provided: supplied by the launcher at runtime, kept out of the fat jar -->
+        <dependency>
+            <groupId>com.alibaba.otter</groupId>
+            <artifactId>client-adapter.common</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.yaml</groupId>
+            <artifactId>snakeyaml</artifactId>
+            <version>1.19</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <!-- JDBC drivers are test-scoped only; production deployments must put
+             the target database's driver on the launcher classpath themselves -->
+        <dependency>
+            <groupId>org.postgresql</groupId>
+            <artifactId>postgresql</artifactId>
+            <version>42.1.4</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.oracle</groupId>
+            <artifactId>ojdbc6</artifactId>
+            <version>11.2.0.3</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.microsoft.sqlserver</groupId>
+            <artifactId>mssql-jdbc</artifactId>
+            <version>7.0.0.jre8</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.12</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <!-- package the adapter plus its non-provided deps as a single jar -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.4</version>
+                <configuration>
+                    <descriptorRefs>
+                        <descriptorRef>jar-with-dependencies</descriptorRef>
+                    </descriptorRefs>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <!-- copy the sample rdb/*.yml mapping configs into the launcher's
+                 classes dir so they ship with the launcher distribution -->
+            <plugin>
+                <artifactId>maven-antrun-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>run</goal>
+                        </goals>
+                        <configuration>
+                            <tasks>
+                                <copy todir="${project.basedir}/../launcher/target/classes/rdb" overwrite="true">
+                                    <fileset dir="${project.basedir}/target/classes/rdb" erroronmissingdir="true">
+                                        <include name="*.yml" />
+                                    </fileset>
+                                </copy>
+                            </tasks>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>

+ 216 - 0
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/RdbAdapter.java

@@ -0,0 +1,216 @@
+package com.alibaba.otter.canal.client.adapter.rdb;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.otter.canal.client.adapter.OuterAdapter;
+import com.alibaba.otter.canal.client.adapter.rdb.config.ConfigLoader;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.rdb.monitor.RdbConfigMonitor;
+import com.alibaba.otter.canal.client.adapter.rdb.service.RdbEtlService;
+import com.alibaba.otter.canal.client.adapter.rdb.service.RdbSyncService;
+import com.alibaba.otter.canal.client.adapter.support.*;
+
+/**
+ * RDB outer adapter: applies canal DML events to a target relational database
+ * through a Druid connection pool, driven by yml mapping configs loaded via
+ * {@link ConfigLoader} and hot-reloaded by {@link RdbConfigMonitor}.
+ */
+@SPI("rdb")
+public class RdbAdapter implements OuterAdapter {
+
+    private static Logger                           logger             = LoggerFactory.getLogger(RdbAdapter.class);
+
+    private Map<String, MappingConfig>              rdbMapping         = new HashMap<>();                          // config file name -> mapping config
+    private Map<String, Map<String, MappingConfig>> mappingConfigCache = new HashMap<>();                          // "destination.database.table" -> (file name -> config)
+
+    private DruidDataSource                         dataSource;
+
+    private RdbSyncService                          rdbSyncService;
+
+    // NOTE(review): this executor is created here and shut down in destroy()
+    // but never submitted to anywhere in this class — looks like dead code.
+    private ExecutorService                         executor           = Executors.newFixedThreadPool(1);
+
+    private RdbConfigMonitor                        rdbConfigMonitor;
+
+    /** Exposed so RdbConfigMonitor can mutate the config caches on file changes. */
+    public Map<String, MappingConfig> getRdbMapping() {
+        return rdbMapping;
+    }
+
+    public Map<String, Map<String, MappingConfig>> getMappingConfigCache() {
+        return mappingConfigCache;
+    }
+
+    /**
+     * Loads the mapping configs matching this adapter instance's key, builds the
+     * lookup cache, initializes the target-DB Druid pool from the
+     * {@code jdbc.*} properties, and starts the sync service and config monitor.
+     */
+    @Override
+    public void init(OuterAdapterConfig configuration) {
+        Map<String, MappingConfig> rdbMappingTmp = ConfigLoader.load();
+        // keep only configs whose outerAdapterKey matches this adapter's key
+        rdbMappingTmp.forEach((key, mappingConfig) -> {
+            if ((mappingConfig.getOuterAdapterKey() == null && configuration.getKey() == null)
+                || (mappingConfig.getOuterAdapterKey() != null
+                    && mappingConfig.getOuterAdapterKey().equalsIgnoreCase(configuration.getKey()))) {
+                rdbMapping.put(key, mappingConfig);
+            }
+        });
+        // index the configs by "destination.database.table" for fast lookup during sync
+        for (Map.Entry<String, MappingConfig> entry : rdbMapping.entrySet()) {
+            String configName = entry.getKey();
+            MappingConfig mappingConfig = entry.getValue();
+            Map<String, MappingConfig> configMap = mappingConfigCache
+                .computeIfAbsent(StringUtils.trimToEmpty(mappingConfig.getDestination()) + "."
+                                 + mappingConfig.getDbMapping().getDatabase() + "."
+                                 + mappingConfig.getDbMapping().getTable(),
+                    k1 -> new HashMap<>());
+            configMap.put(configName, mappingConfig);
+        }
+
+        Map<String, String> properties = configuration.getProperties();
+        dataSource = new DruidDataSource();
+        dataSource.setDriverClassName(properties.get("jdbc.driverClassName"));
+        dataSource.setUrl(properties.get("jdbc.url"));
+        dataSource.setUsername(properties.get("jdbc.username"));
+        dataSource.setPassword(properties.get("jdbc.password"));
+        dataSource.setInitialSize(1);
+        dataSource.setMinIdle(1);
+        dataSource.setMaxActive(20);
+        dataSource.setMaxWait(60000);
+        dataSource.setTimeBetweenEvictionRunsMillis(60000);
+        dataSource.setMinEvictableIdleTimeMillis(300000);
+
+        try {
+            dataSource.init();
+        } catch (SQLException e) {
+            // NOTE(review): pool init failure is only logged — the adapter continues
+            // starting and later sync calls will fail; consider failing fast here.
+            logger.error("ERROR ## failed to initial datasource: " + properties.get("jdbc.url"), e);
+        }
+
+        String threads = properties.get("threads");
+        // String commitSize = properties.get("commitSize");
+
+        rdbSyncService = new RdbSyncService(mappingConfigCache,
+            dataSource,
+            threads != null ? Integer.valueOf(threads) : null);
+
+        rdbConfigMonitor = new RdbConfigMonitor();
+        rdbConfigMonitor.init(configuration.getKey(), this);
+    }
+
+    /** Delegates a batch of DML events to the sync service. */
+    @Override
+    public void sync(List<Dml> dmls) {
+        rdbSyncService.sync(dmls);
+    }
+
+    /**
+     * Runs an ETL import. {@code task} is either a config file name (single
+     * config) or a destination name (all configs for that destination).
+     *
+     * @param task   config file name or canal destination
+     * @param params optional ETL parameters passed through to RdbEtlService
+     * @return aggregated result; failure with "Task not found" when nothing matched
+     */
+    @Override
+    public EtlResult etl(String task, List<String> params) {
+        EtlResult etlResult = new EtlResult();
+        MappingConfig config = rdbMapping.get(task);
+        if (config != null) {
+            DataSource srcDataSource = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey());
+            if (srcDataSource != null) {
+                return RdbEtlService.importData(srcDataSource, dataSource, config, params);
+            } else {
+                etlResult.setSucceeded(false);
+                etlResult.setErrorMessage("DataSource not found");
+                return etlResult;
+            }
+        } else {
+            StringBuilder resultMsg = new StringBuilder();
+            boolean resSucc = true;
+            // no direct config hit: treat task as a destination name
+            for (MappingConfig configTmp : rdbMapping.values()) {
+                // run every config whose destination equals task
+                if (configTmp.getDestination().equals(task)) {
+                    DataSource srcDataSource = DatasourceConfig.DATA_SOURCES.get(configTmp.getDataSourceKey());
+                    if (srcDataSource == null) {
+                        continue;
+                    }
+                    EtlResult etlRes = RdbEtlService.importData(srcDataSource, dataSource, configTmp, params);
+                    if (!etlRes.getSucceeded()) {
+                        resSucc = false;
+                        resultMsg.append(etlRes.getErrorMessage()).append("\n");
+                    } else {
+                        resultMsg.append(etlRes.getResultMessage()).append("\n");
+                    }
+                }
+            }
+            if (resultMsg.length() > 0) {
+                etlResult.setSucceeded(resSucc);
+                if (resSucc) {
+                    etlResult.setResultMessage(resultMsg.toString());
+                } else {
+                    etlResult.setErrorMessage(resultMsg.toString());
+                }
+                return etlResult;
+            }
+        }
+        etlResult.setSucceeded(false);
+        etlResult.setErrorMessage("Task not found");
+        return etlResult;
+    }
+
+    /**
+     * Counts rows in the target table of the given task's config.
+     * NOTE(review): rdbMapping.get(task) may return null for an unknown task,
+     * in which case config.getDbMapping() throws NPE — verify callers guarantee
+     * the task exists, or add a null check.
+     */
+    @Override
+    public Map<String, Object> count(String task) {
+        MappingConfig config = rdbMapping.get(task);
+        MappingConfig.DbMapping dbMapping = config.getDbMapping();
+        String sql = "SELECT COUNT(1) AS cnt FROM " + dbMapping.getTargetTable();
+        Connection conn = null;
+        Map<String, Object> res = new LinkedHashMap<>();
+        try {
+            conn = dataSource.getConnection();
+            Util.sqlRS(conn, sql, rs -> {
+                try {
+                    if (rs.next()) {
+                        Long rowCount = rs.getLong("cnt");
+                        res.put("count", rowCount);
+                    }
+                } catch (SQLException e) {
+                    logger.error(e.getMessage(), e);
+                }
+            });
+        } catch (SQLException e) {
+            logger.error(e.getMessage(), e);
+        } finally {
+            if (conn != null) {
+                try {
+                    conn.close();
+                } catch (SQLException e) {
+                    logger.error(e.getMessage(), e);
+                }
+            }
+        }
+        res.put("targetTable", dbMapping.getTargetTable());
+
+        return res;
+    }
+
+    /** @return the canal destination of the task's config, or null if unknown. */
+    @Override
+    public String getDestination(String task) {
+        MappingConfig config = rdbMapping.get(task);
+        if (config != null) {
+            return config.getDestination();
+        }
+        return null;
+    }
+
+    /** Stops the config monitor and sync service, then closes the pool. */
+    @Override
+    public void destroy() {
+        if (rdbConfigMonitor != null) {
+            rdbConfigMonitor.destroy();
+        }
+
+        if (rdbSyncService != null) {
+            rdbSyncService.close();
+        }
+
+        executor.shutdown();
+
+        if (dataSource != null) {
+            dataSource.close();
+        }
+    }
+}

+ 46 - 0
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/ConfigLoader.java

@@ -0,0 +1,46 @@
+package com.alibaba.otter.canal.client.adapter.rdb.config;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
+
+/**
+ * RDB table mapping config loader.
+ *
+ * @author rewerma 2018-11-07 下午02:41:34
+ * @version 1.0.0
+ */
+public class ConfigLoader {
+
+    private static Logger logger = LoggerFactory.getLogger(ConfigLoader.class);
+
+    /**
+     * Loads all RDB table mapping configurations from the "rdb" config dir.
+     * (Original comment said "HBase" — copy-paste leftover; this loads RDB configs.)
+     *
+     * @return map of config file name to parsed, validated config
+     * @throws RuntimeException if any config file fails validation
+     */
+    public static Map<String, MappingConfig> load() {
+        logger.info("## Start loading rdb mapping config ... ");
+
+        Map<String, MappingConfig> result = new LinkedHashMap<>();
+
+        Map<String, String> configContentMap = MappingConfigsLoader.loadConfigs("rdb");
+        configContentMap.forEach((fileName, content) -> {
+            MappingConfig config = new Yaml().loadAs(content, MappingConfig.class);
+            try {
+                config.validate();
+            } catch (Exception e) {
+                // fail fast with the offending file name so the bad yml is easy to find
+                throw new RuntimeException("ERROR Config: " + fileName + " " + e.getMessage(), e);
+            }
+            result.put(fileName, config);
+        });
+
+        logger.info("## Rdb mapping config loaded");
+        return result;
+    }
+}

+ 182 - 0
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/MappingConfig.java

@@ -0,0 +1,182 @@
+package com.alibaba.otter.canal.client.adapter.rdb.config;
+
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * RDB table mapping configuration (deserialized from one yml file).
+ *
+ * @author rewerma 2018-11-07 下午02:41:34
+ * @version 1.0.0
+ */
+public class MappingConfig {
+
+    private String    dataSourceKey;   // key of the source datasource
+
+    private String    destination;     // canal instance name or MQ topic
+
+    private String    outerAdapterKey; // key of the outer adapter instance this config belongs to
+
+    private Boolean   concurrent;      // whether to sync in parallel
+
+    private DbMapping dbMapping;       // database/table mapping details
+
+    public String getDataSourceKey() {
+        return dataSourceKey;
+    }
+
+    public void setDataSourceKey(String dataSourceKey) {
+        this.dataSourceKey = dataSourceKey;
+    }
+
+    public String getOuterAdapterKey() {
+        return outerAdapterKey;
+    }
+
+    public void setOuterAdapterKey(String outerAdapterKey) {
+        this.outerAdapterKey = outerAdapterKey;
+    }
+
+    // defaults to false when the yml omits the field
+    public Boolean getConcurrent() {
+        return concurrent == null ? false : concurrent;
+    }
+
+    public void setConcurrent(Boolean concurrent) {
+        this.concurrent = concurrent;
+    }
+
+    public DbMapping getDbMapping() {
+        return dbMapping;
+    }
+
+    public void setDbMapping(DbMapping dbMapping) {
+        this.dbMapping = dbMapping;
+    }
+
+    public String getDestination() {
+        return destination;
+    }
+
+    public void setDestination(String destination) {
+        this.destination = destination;
+    }
+
+    /**
+     * Validates required fields; throws NullPointerException naming the missing one.
+     * NOTE(review): also NPEs if dbMapping itself is null — presumably intentional
+     * fail-fast, but the message would be less helpful; confirm.
+     */
+    public void validate() {
+        if (dbMapping.database == null || dbMapping.database.isEmpty()) {
+            throw new NullPointerException("dbMapping.database");
+        }
+        if (dbMapping.table == null || dbMapping.table.isEmpty()) {
+            throw new NullPointerException("dbMapping.table");
+        }
+        if (dbMapping.targetTable == null || dbMapping.targetTable.isEmpty()) {
+            throw new NullPointerException("dbMapping.targetTable");
+        }
+    }
+
+    /** Source-to-target table mapping details. */
+    public static class DbMapping {
+
+        private String              database;                            // source database or schema name
+        private String              table;                               // source table name (original comment had typo "表面名")
+        private Map<String, String> targetPk;                            // target table primary-key column mapping
+        private boolean             mapAll      = false;                 // map all columns automatically
+        private String              targetTable;                         // target table name
+        private Map<String, String> targetColumns;                       // target column mapping
+
+        private String              etlCondition;                        // extra SQL condition applied during ETL
+
+        private Set<String>         families    = new LinkedHashSet<>(); // NOTE(review): HBase column-family leftover — unused for RDB?
+        private int                 readBatch   = 5000;
+        private int                 commitBatch = 5000;                  // batch commit size for ETL
+
+        // private volatile Map<String, String> allColumns; // mapAll为true,自动设置改字段
+
+        public String getDatabase() {
+            return database;
+        }
+
+        public void setDatabase(String database) {
+            this.database = database;
+        }
+
+        public String getTable() {
+            return table;
+        }
+
+        public void setTable(String table) {
+            this.table = table;
+        }
+
+        public Map<String, String> getTargetPk() {
+            return targetPk;
+        }
+
+        public void setTargetPk(Map<String, String> targetPk) {
+            this.targetPk = targetPk;
+        }
+
+        public boolean isMapAll() {
+            return mapAll;
+        }
+
+        public void setMapAll(boolean mapAll) {
+            this.mapAll = mapAll;
+        }
+
+        public String getTargetTable() {
+            return targetTable;
+        }
+
+        public void setTargetTable(String targetTable) {
+            this.targetTable = targetTable;
+        }
+
+        public Map<String, String> getTargetColumns() {
+            return targetColumns;
+        }
+
+        public void setTargetColumns(Map<String, String> targetColumns) {
+            this.targetColumns = targetColumns;
+        }
+
+        public String getEtlCondition() {
+            return etlCondition;
+        }
+
+        public void setEtlCondition(String etlCondition) {
+            this.etlCondition = etlCondition;
+        }
+
+        public Set<String> getFamilies() {
+            return families;
+        }
+
+        public void setFamilies(Set<String> families) {
+            this.families = families;
+        }
+
+        public int getReadBatch() {
+            return readBatch;
+        }
+
+        public void setReadBatch(int readBatch) {
+            this.readBatch = readBatch;
+        }
+
+        public int getCommitBatch() {
+            return commitBatch;
+        }
+
+        public void setCommitBatch(int commitBatch) {
+            this.commitBatch = commitBatch;
+        }
+
+        // public Map<String, String> getAllColumns() {
+        // return allColumns;
+        // }
+        //
+        // public void setAllColumns(Map<String, String> allColumns) {
+        // this.allColumns = allColumns;
+        // }
+    }
+}

+ 141 - 0
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/monitor/RdbConfigMonitor.java

@@ -0,0 +1,141 @@
+package com.alibaba.otter.canal.client.adapter.rdb.monitor;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.io.filefilter.FileFilterUtils;
+import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
+import org.apache.commons.io.monitor.FileAlterationMonitor;
+import org.apache.commons.io.monitor.FileAlterationObserver;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import com.alibaba.otter.canal.client.adapter.rdb.RdbAdapter;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+
+/**
+ * Watches the "rdb" config directory and hot-reloads yml mapping configs into
+ * the owning {@link RdbAdapter}'s caches on create/change/delete.
+ */
+public class RdbConfigMonitor {
+
+    private static final Logger   logger = LoggerFactory.getLogger(RdbConfigMonitor.class);
+
+    private static final String   adapterName = "rdb";
+
+    private String                key;
+
+    private RdbAdapter            rdbAdapter;
+
+    private FileAlterationMonitor fileMonitor;
+
+    /**
+     * Starts a commons-io file monitor polling the rdb conf dir every 3s for
+     * *.yml files.
+     *
+     * @param key        outer adapter key; only configs with a matching
+     *                   outerAdapterKey are (re)loaded
+     * @param rdbAdapter adapter whose caches are updated
+     */
+    public void init(String key, RdbAdapter rdbAdapter) {
+        this.key = key;
+        this.rdbAdapter = rdbAdapter;
+        File confDir = Util.getConfDirPath(adapterName);
+        try {
+            FileAlterationObserver observer = new FileAlterationObserver(confDir,
+                FileFilterUtils.and(FileFilterUtils.fileFileFilter(), FileFilterUtils.suffixFileFilter("yml")));
+            FileListener listener = new FileListener();
+            observer.addListener(listener);
+            fileMonitor = new FileAlterationMonitor(3000, observer);
+            fileMonitor.start();
+
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /** Stops the file monitor. NOTE(review): NPEs if init() never ran or failed before assignment. */
+    public void destroy() {
+        try {
+            fileMonitor.stop();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /** Reacts to yml file events; all exceptions are logged, never propagated. */
+    private class FileListener extends FileAlterationListenerAdaptor {
+
+        @Override
+        public void onFileCreate(File file) {
+            super.onFileCreate(file);
+            try {
+                // parse and validate the newly added config file
+                String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName());
+                MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
+                config.validate();
+                if ((key == null && config.getOuterAdapterKey() == null)
+                    || (key != null && key.equals(config.getOuterAdapterKey()))) {
+                    addConfigToCache(file, config);
+
+                    logger.info("Add a new rdb mapping config: {} to canal adapter", file.getName());
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        @Override
+        public void onFileChange(File file) {
+            super.onFileChange(file);
+
+            try {
+                // only react to files this adapter instance already owns
+                if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
+                    // re-parse and re-validate the changed config
+                    String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName());
+                    MappingConfig config = new Yaml().loadAs(configContent, MappingConfig.class);
+                    config.validate();
+                    if ((key == null && config.getOuterAdapterKey() == null)
+                        || (key != null && key.equals(config.getOuterAdapterKey()))) {
+                        if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
+                            deleteConfigFromCache(file);
+                        }
+                        addConfigToCache(file, config);
+                    } else {
+                        // outerAdapterKey may not be changed at runtime
+                        throw new RuntimeException("Outer adapter key not allowed modify");
+                    }
+                    logger.info("Change a rdb mapping config: {} of canal adapter", file.getName());
+                }
+            } catch (Exception e) {
+                // includes the RuntimeException above — logged, not rethrown
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        @Override
+        public void onFileDelete(File file) {
+            super.onFileDelete(file);
+
+            try {
+                if (rdbAdapter.getRdbMapping().containsKey(file.getName())) {
+                    deleteConfigFromCache(file);
+
+                    logger.info("Delete a rdb mapping config: {} of canal adapter", file.getName());
+                }
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        /** Registers the config in both the by-file-name map and the destination.db.table cache. */
+        private void addConfigToCache(File file, MappingConfig config) {
+            rdbAdapter.getRdbMapping().put(file.getName(), config);
+            Map<String, MappingConfig> configMap = rdbAdapter.getMappingConfigCache()
+                .computeIfAbsent(StringUtils.trimToEmpty(config.getDestination()) + "."
+                                 + config.getDbMapping().getDatabase() + "." + config.getDbMapping().getTable(),
+                    k1 -> new HashMap<>());
+            configMap.put(file.getName(), config);
+        }
+
+        /** Removes the config from the by-file-name map and every cache bucket. */
+        private void deleteConfigFromCache(File file) {
+
+            rdbAdapter.getRdbMapping().remove(file.getName());
+            for (Map<String, MappingConfig> configMap : rdbAdapter.getMappingConfigCache().values()) {
+                if (configMap != null) {
+                    configMap.remove(file.getName());
+                }
+            }
+
+        }
+    }
+}

+ 293 - 0
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbEtlService.java

@@ -0,0 +1,293 @@
+package com.alibaba.otter.canal.client.adapter.rdb.service;
+
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+
+import javax.sql.DataSource;
+
+import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig.DbMapping;
+import com.alibaba.otter.canal.client.adapter.support.EtlResult;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+import com.google.common.base.Joiner;
+
+/**
+ * RDB ETL 操作业务类
+ *
+ * @author rewerma @ 2018-11-7
+ * @version 1.0.0
+ */
+/**
+ * RDB ETL 操作业务类
+ * <p>
+ * Bulk-copies rows from a source JDBC datasource into the mapped target table.
+ * Rows are delete-then-insert so a re-import is idempotent on the primary key.
+ *
+ * @author rewerma @ 2018-11-7
+ * @version 1.0.0
+ */
+public class RdbEtlService {
+
+    private static final Logger logger = LoggerFactory.getLogger(RdbEtlService.class);
+
+    /**
+     * Imports the full (optionally filtered) content of the source table.
+     *
+     * @param srcDS source datasource
+     * @param targetDS target datasource
+     * @param config table mapping configuration (must not be null)
+     * @param params optional parameters for the etl condition / SYS_TIME filter
+     * @return result with success flag, row count message or joined error messages
+     */
+    public static EtlResult importData(DataSource srcDS, DataSource targetDS, MappingConfig config,
+                                       List<String> params) {
+        EtlResult etlResult = new EtlResult();
+        AtomicLong successCount = new AtomicLong();
+        List<String> errMsg = new ArrayList<>();
+        // Table name for the catch-block message; was a copy-paste leftover
+        // named "hbaseTable" that stayed empty forever.
+        String targetTable = "";
+        try {
+            if (config == null) {
+                logger.error("Config is null!");
+                etlResult.setSucceeded(false);
+                etlResult.setErrorMessage("Config is null!");
+                return etlResult;
+            }
+            DbMapping dbMapping = config.getDbMapping();
+            targetTable = dbMapping.getTargetTable();
+
+            long start = System.currentTimeMillis();
+
+            // Base query over the source table
+            StringBuilder sql = new StringBuilder(
+                "SELECT * FROM " + dbMapping.getDatabase() + "." + dbMapping.getTable());
+
+            // Append the configured etl condition or the implicit SYS_TIME filter
+            appendCondition(params, dbMapping, srcDS, sql);
+
+            // Total row count, used to decide single- vs multi-threaded import
+            String countSql = "SELECT COUNT(1) FROM ( " + sql + ") _CNT ";
+            long cnt = (Long) Util.sqlRS(srcDS, countSql, rs -> {
+                Long count = null;
+                try {
+                    if (rs.next()) {
+                        count = ((Number) rs.getObject(1)).longValue();
+                    }
+                } catch (Exception e) {
+                    logger.error(e.getMessage(), e);
+                }
+                return count == null ? 0L : count;
+            });
+
+            // 当大于1万条记录时开启多线程 (parallelize above 10k rows)
+            if (cnt >= 10000) {
+                int threadCount = 3;
+                long perThreadCnt = cnt / threadCount;
+                ExecutorService executor = Executors.newFixedThreadPool(threadCount);
+                try {
+                    List<Future<Boolean>> futures = new ArrayList<>(threadCount);
+                    for (int i = 0; i < threadCount; i++) {
+                        long offset = i * perThreadCnt;
+                        // The last worker takes the remainder of the rows
+                        long size = (i == threadCount - 1) ? cnt - offset : perThreadCnt;
+                        String sqlFinal = sql + " LIMIT " + offset + "," + size;
+                        futures.add(executor.submit(
+                            () -> executeSqlImport(srcDS, targetDS, sqlFinal, dbMapping, successCount, errMsg)));
+                    }
+
+                    for (Future<Boolean> future : futures) {
+                        future.get();
+                    }
+                } finally {
+                    // Fix: the pool used to leak when a worker future threw
+                    executor.shutdown();
+                }
+            } else {
+                executeSqlImport(srcDS, targetDS, sql.toString(), dbMapping, successCount, errMsg);
+            }
+
+            logger.info(
+                dbMapping.getTable() + " etl completed in: " + (System.currentTimeMillis() - start) / 1000 + "s!");
+
+            etlResult.setResultMessage("导入目标表 " + dbMapping.getTargetTable() + " 数据:" + successCount.get() + " 条");
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            errMsg.add(targetTable + " etl failed! ==>" + e.getMessage());
+        }
+
+        if (errMsg.isEmpty()) {
+            etlResult.setSucceeded(true);
+        } else {
+            etlResult.setErrorMessage(Joiner.on("\n").join(errMsg));
+        }
+        return etlResult;
+    }
+
+    /**
+     * Appends the WHERE clause: either the configured etlCondition with
+     * {0},{1},... placeholders replaced, or a SYS_TIME >= param filter when a
+     * single parameter is given and the source table has a SYS_TIME column.
+     * <p>
+     * NOTE(review): params are spliced into the SQL text, not bound — callers
+     * must only pass trusted values.
+     */
+    private static void appendCondition(List<String> params, DbMapping dbMapping, DataSource ds,
+                                        StringBuilder sql) throws SQLException {
+        if (params != null && params.size() == 1 && dbMapping.getEtlCondition() == null) {
+            AtomicBoolean stExists = new AtomicBoolean(false);
+            // Probe the source result set metadata for a SYS_TIME column
+            Util.sqlRS(ds, sql.toString(), rs -> {
+                try {
+                    ResultSetMetaData rsmd = rs.getMetaData();
+                    int cnt = rsmd.getColumnCount();
+                    for (int i = 1; i <= cnt; i++) {
+                        String columnName = rsmd.getColumnName(i);
+                        if ("SYS_TIME".equalsIgnoreCase(columnName)) {
+                            stExists.set(true);
+                            break;
+                        }
+                    }
+                } catch (Exception e) {
+                    // ignore: treat metadata failure as "no SYS_TIME column"
+                }
+                return null;
+            });
+            if (stExists.get()) {
+                sql.append(" WHERE SYS_TIME >= '").append(params.get(0)).append("' ");
+            }
+        } else if (dbMapping.getEtlCondition() != null && params != null) {
+            String etlCondition = dbMapping.getEtlCondition();
+            int size = params.size();
+            for (int i = 0; i < size; i++) {
+                etlCondition = etlCondition.replace("{" + i + "}", params.get(i));
+            }
+
+            sql.append(" ").append(etlCondition);
+        }
+    }
+
+    /**
+     * 执行导入
+     * Streams the source result set and delete-then-inserts each row into the
+     * target table, committing every {@code commitBatch} rows.
+     *
+     * @return true when the source query itself succeeded (row-level errors are
+     *         collected into errMsg instead)
+     */
+    private static boolean executeSqlImport(DataSource srcDS, DataSource targetDS, String sql, DbMapping dbMapping,
+                                            AtomicLong successCount, List<String> errMsg) {
+        try {
+            Util.sqlRS(srcDS, sql, rs -> {
+                int idx = 1;
+
+                try {
+                    // Collect source column names and JDBC types once
+                    Map<String, Integer> columnType = new LinkedHashMap<>();
+                    ResultSetMetaData rsd = rs.getMetaData();
+                    int columnCount = rsd.getColumnCount();
+                    List<String> columns = new ArrayList<>();
+                    for (int i = 1; i <= columnCount; i++) {
+                        columnType.put(rsd.getColumnName(i).toLowerCase(), rsd.getColumnType(i));
+                        columns.add(rsd.getColumnName(i));
+                    }
+
+                    Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, columns);
+
+                    // Build "INSERT INTO target (c1,c2,...) VALUES (?,?,...)"
+                    StringBuilder insertSql = new StringBuilder();
+                    insertSql.append("INSERT INTO ").append(dbMapping.getTargetTable()).append(" (");
+                    columnsMap
+                        .forEach((targetColumnName, srcColumnName) -> insertSql.append(targetColumnName).append(","));
+
+                    int len = insertSql.length();
+                    insertSql.delete(len - 1, len).append(") VALUES (");
+                    int mapLen = columnsMap.size();
+                    for (int i = 0; i < mapLen; i++) {
+                        insertSql.append("?,");
+                    }
+                    len = insertSql.length();
+                    insertSql.delete(len - 1, len).append(")");
+                    try (Connection connTarget = targetDS.getConnection();
+                            PreparedStatement pstmt = connTarget.prepareStatement(insertSql.toString())) {
+                        connTarget.setAutoCommit(false);
+
+                        while (rs.next()) {
+                            pstmt.clearParameters();
+
+                            // Delete any existing target row with the same PK so
+                            // a re-import is idempotent
+                            Map<String, Object> values = new LinkedHashMap<>();
+                            StringBuilder deleteSql = new StringBuilder(
+                                "DELETE FROM " + dbMapping.getTargetTable() + " WHERE ");
+                            appendCondition(dbMapping, deleteSql, values, rs);
+                            try (PreparedStatement pstmt2 = connTarget.prepareStatement(deleteSql.toString())) {
+                                int k = 1;
+                                for (Object val : values.values()) {
+                                    pstmt2.setObject(k++, val);
+                                }
+                                pstmt2.execute();
+                            }
+
+                            int i = 1;
+                            for (Map.Entry<String, String> entry : columnsMap.entrySet()) {
+                                String targetColumnName = entry.getKey();
+                                String srcColumnName = entry.getValue();
+                                if (srcColumnName == null) {
+                                    // Null mapping means same name on both sides
+                                    srcColumnName = targetColumnName;
+                                }
+
+                                Integer type = columnType.get(targetColumnName.toLowerCase());
+
+                                Object value = rs.getObject(srcColumnName);
+                                if (value != null) {
+                                    SyncUtil.setPStmt(type, pstmt, value, i);
+                                } else {
+                                    pstmt.setNull(i, type);
+                                }
+
+                                i++;
+                            }
+
+                            pstmt.execute();
+                            if (logger.isTraceEnabled()) {
+                                logger.trace("Insert into target table, sql: {}", insertSql);
+                            }
+
+                            // Commit in batches. Fix: the old "completed" flag
+                            // skipped this final commit once any full batch had
+                            // committed, losing the trailing partial batch.
+                            if (idx % dbMapping.getCommitBatch() == 0) {
+                                connTarget.commit();
+                            }
+                            idx++;
+                            successCount.incrementAndGet();
+                            if (logger.isDebugEnabled()) {
+                                logger.debug("successful import count:" + successCount.get());
+                            }
+                        }
+                        // Flush whatever remains of the last (partial) batch
+                        connTarget.commit();
+                    }
+
+                } catch (Exception e) {
+                    logger.error(dbMapping.getTable() + " etl failed! ==>" + e.getMessage(), e);
+                    errMsg.add(dbMapping.getTable() + " etl failed! ==>" + e.getMessage());
+                }
+                return idx;
+            });
+            return true;
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            return false;
+        }
+    }
+
+    /**
+     * 拼接目标表主键where条件
+     * Builds "pk1=? AND pk2=?" from the mapping's target PK columns and records
+     * the corresponding source-row values (in order) into {@code values}.
+     */
+    private static void appendCondition(DbMapping dbMapping, StringBuilder sql, Map<String, Object> values,
+                                        ResultSet rs) throws SQLException {
+        // 拼接主键
+        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
+            String targetColumnName = entry.getKey();
+            String srcColumnName = entry.getValue();
+            if (srcColumnName == null) {
+                srcColumnName = targetColumnName;
+            }
+            sql.append(targetColumnName).append("=? AND ");
+            values.put(targetColumnName, rs.getObject(srcColumnName));
+        }
+        // Drop the trailing " AND"
+        int len = sql.length();
+        sql.delete(len - 4, len);
+    }
+}

+ 409 - 0
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java

@@ -0,0 +1,409 @@
+package com.alibaba.otter.canal.client.adapter.rdb.service;
+
+import java.sql.Connection;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.serializer.SerializerFeature;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig.DbMapping;
+import com.alibaba.otter.canal.client.adapter.rdb.support.BatchExecutor;
+import com.alibaba.otter.canal.client.adapter.rdb.support.SingleDml;
+import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil;
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+import com.alibaba.otter.canal.client.adapter.support.Util;
+
+/**
+ * RDB同步操作业务
+ *
+ * @author rewerma 2018-11-7 下午06:45:49
+ * @version 1.0.0
+ */
+/**
+ * RDB同步操作业务
+ * <p>
+ * Partitions canal DML events by primary-key hash and replays them on a fixed
+ * set of single-threaded executors, so changes to the same row are always
+ * applied in order on the same connection.
+ *
+ * @author rewerma 2018-11-7 下午06:45:49
+ * @version 1.0.0
+ */
+public class RdbSyncService {
+
+    private static final Logger                     logger             = LoggerFactory.getLogger(RdbSyncService.class);
+
+    // Target-table column name (lower case) -> java.sql.Types code, keyed by
+    // "destination.database.table"; filled lazily by getTargetColumnType()
+    private final Map<String, Map<String, Integer>> COLUMNS_TYPE_CACHE = new ConcurrentHashMap<>();
+
+    // Mapping configs keyed by "destination.database.table"
+    private Map<String, Map<String, MappingConfig>> mappingConfigCache;
+
+    private int                                     threads            = 3;
+
+    // Parallel arrays indexed by partition id in [0, threads)
+    private List<SyncItem>[]                        dmlsPartition;
+    private BatchExecutor[]                         batchExecutors;
+    private ExecutorService[]                       executorThreads;
+
+    @SuppressWarnings("unchecked")
+    public RdbSyncService(Map<String, Map<String, MappingConfig>> mappingConfigCache, DataSource dataSource,
+                          Integer threads){
+        try {
+            if (threads != null) {
+                this.threads = threads;
+            }
+            this.mappingConfigCache = mappingConfigCache;
+            this.dmlsPartition = new List[this.threads];
+            this.batchExecutors = new BatchExecutor[this.threads];
+            this.executorThreads = new ExecutorService[this.threads];
+            for (int i = 0; i < this.threads; i++) {
+                dmlsPartition[i] = new ArrayList<>();
+                batchExecutors[i] = new BatchExecutor(dataSource.getConnection());
+                executorThreads[i] = Executors.newSingleThreadExecutor();
+            }
+        } catch (SQLException e) {
+            // NOTE(review): swallowing this leaves the arrays partially
+            // initialized and sync() will fail later; kept for compatibility
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * Replays a batch of DML events. Tables without a mapping config are
+     * skipped; row-level errors are logged, not rethrown.
+     *
+     * @param dmls DML events from canal
+     */
+    public void sync(List<Dml> dmls) {
+        try {
+            for (Dml dml : dmls) {
+                String destination = StringUtils.trimToEmpty(dml.getDestination());
+                String database = dml.getDatabase();
+                String table = dml.getTable();
+                Map<String, MappingConfig> configMap = mappingConfigCache
+                    .get(destination + "." + database + "." + table);
+
+                if (configMap == null || configMap.isEmpty()) {
+                    // Fix: an event for an unmapped table used to raise an NPE
+                    // here, aborting the whole batch
+                    continue;
+                }
+
+                for (MappingConfig config : configMap.values()) {
+
+                    if (config.getConcurrent()) {
+                        // Hash each row by PK so the same row always lands on
+                        // the same partition (per-row ordering preserved)
+                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
+                        singleDmls.forEach(singleDml -> {
+                            int hash = pkHash(config.getDbMapping(), singleDml.getData());
+                            dmlsPartition[hash].add(new SyncItem(config, singleDml));
+                        });
+                    } else {
+                        // Non-concurrent config: pin the whole table to one partition
+                        int hash = Math.abs(Math.abs(config.getDbMapping().getTargetTable().hashCode()) % threads);
+                        List<SingleDml> singleDmls = SingleDml.dml2SingleDmls(dml);
+                        singleDmls.forEach(singleDml -> {
+                            dmlsPartition[hash].add(new SyncItem(config, singleDml));
+                        });
+                    }
+                }
+            }
+            List<Future<Boolean>> futures = new ArrayList<>();
+            for (int i = 0; i < threads; i++) {
+                int j = i;
+                futures.add(executorThreads[i].submit(() -> {
+                    dmlsPartition[j].forEach(syncItem -> sync(batchExecutors[j], syncItem.config, syncItem.singleDml));
+                    batchExecutors[j].commit();
+                    return true;
+                }));
+            }
+
+            // Wait for every partition before clearing the work queues
+            futures.forEach(future -> {
+                try {
+                    future.get();
+                } catch (Exception e) {
+                    logger.error(e.getMessage(), e);
+                }
+            });
+
+            for (int i = 0; i < threads; i++) {
+                dmlsPartition[i].clear();
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * Dispatches a single row change to insert/update/delete.
+     */
+    private void sync(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) {
+        try {
+            if (config != null) {
+                String type = dml.getType();
+                if (type != null && type.equalsIgnoreCase("INSERT")) {
+                    insert(batchExecutor, config, dml);
+                } else if (type != null && type.equalsIgnoreCase("UPDATE")) {
+                    update(batchExecutor, config, dml);
+                } else if (type != null && type.equalsIgnoreCase("DELETE")) {
+                    delete(batchExecutor, config, dml);
+                }
+                if (logger.isDebugEnabled()) {
+                    logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue));
+                }
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * 插入操作 — inserts one row into the target table.
+     *
+     * @param config 配置项 mapping configuration
+     * @param dml DML数据 single-row DML
+     */
+    private void insert(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) {
+        Map<String, Object> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        DbMapping dbMapping = config.getDbMapping();
+
+        try {
+            Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
+
+            // Build "INSERT INTO target (c1,c2,...) VALUES (?,?,...)"
+            StringBuilder insertSql = new StringBuilder();
+            insertSql.append("INSERT INTO ").append(dbMapping.getTargetTable()).append(" (");
+
+            columnsMap.forEach((targetColumnName, srcColumnName) -> insertSql.append(targetColumnName).append(","));
+            int len = insertSql.length();
+            insertSql.delete(len - 1, len).append(") VALUES (");
+            int mapLen = columnsMap.size();
+            for (int i = 0; i < mapLen; i++) {
+                insertSql.append("?,");
+            }
+            len = insertSql.length();
+            insertSql.delete(len - 1, len).append(")");
+
+            Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
+
+            List<Map<String, ?>> values = new ArrayList<>();
+            for (Map.Entry<String, String> entry : columnsMap.entrySet()) {
+                String targetColumnName = entry.getKey();
+                String srcColumnName = entry.getValue();
+                if (srcColumnName == null) {
+                    // Null mapping means same column name on both sides
+                    srcColumnName = targetColumnName;
+                }
+
+                Integer type = ctype.get(targetColumnName.toLowerCase());
+
+                Object value = data.get(srcColumnName);
+
+                BatchExecutor.setValue(values, type, value);
+            }
+
+            batchExecutor.execute(insertSql.toString(), values);
+            if (logger.isTraceEnabled()) {
+                logger.trace("Insert into target table, sql: {}", insertSql);
+            }
+
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * 更新操作 — updates the changed columns, matching on the primary key (old
+     * PK values are used when the key itself was modified).
+     *
+     * @param config 配置项 mapping configuration
+     * @param dml DML数据 single-row DML
+     */
+    private void update(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) {
+        Map<String, Object> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        Map<String, Object> old = dml.getOld();
+        if (old == null || old.isEmpty()) {
+            return;
+        }
+
+        DbMapping dbMapping = config.getDbMapping();
+
+        try {
+            Map<String, String> columnsMap = SyncUtil.getColumnsMap(dbMapping, data);
+
+            Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
+
+            StringBuilder updateSql = new StringBuilder();
+            updateSql.append("UPDATE ").append(dbMapping.getTargetTable()).append(" SET ");
+            List<Map<String, ?>> values = new ArrayList<>();
+            // Only columns present in "old" actually changed
+            for (String srcColumnName : old.keySet()) {
+                List<String> targetColumnNames = new ArrayList<>();
+                // NOTE(review): assumes mapping srcColumn values are lower case
+                columnsMap.forEach((targetColumn, srcColumn) -> {
+                    if (srcColumnName.toLowerCase().equals(srcColumn)) {
+                        targetColumnNames.add(targetColumn);
+                    }
+                });
+                for (String targetColumnName : targetColumnNames) {
+                    updateSql.append(targetColumnName).append("=?, ");
+                    Integer type = ctype.get(targetColumnName.toLowerCase());
+                    BatchExecutor.setValue(values, type, data.get(srcColumnName));
+                }
+            }
+            if (values.isEmpty()) {
+                // Fix: no mapped column changed — the old code went on to emit
+                // malformed SQL ("UPDATE t SE WHERE ...")
+                return;
+            }
+            int len = updateSql.length();
+            updateSql.delete(len - 2, len).append(" WHERE ");
+
+            // 拼接主键 primary-key where clause
+            appendCondition(dbMapping, updateSql, ctype, values, data, old);
+
+            batchExecutor.execute(updateSql.toString(), values);
+
+            if (logger.isTraceEnabled()) {
+                logger.trace("Update target table, sql: {}", updateSql);
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * 删除操作 — deletes the row matching the primary key.
+     *
+     * @param config mapping configuration
+     * @param dml single-row DML
+     */
+    private void delete(BatchExecutor batchExecutor, MappingConfig config, SingleDml dml) {
+        Map<String, Object> data = dml.getData();
+        if (data == null || data.isEmpty()) {
+            return;
+        }
+
+        DbMapping dbMapping = config.getDbMapping();
+
+        try {
+            Map<String, Integer> ctype = getTargetColumnType(batchExecutor.getConn(), config);
+
+            StringBuilder sql = new StringBuilder();
+            sql.append("DELETE FROM ").append(dbMapping.getTargetTable()).append(" WHERE ");
+
+            List<Map<String, ?>> values = new ArrayList<>();
+            // 拼接主键 primary-key where clause
+            appendCondition(dbMapping, sql, ctype, values, data);
+
+            batchExecutor.execute(sql.toString(), values);
+
+            if (logger.isTraceEnabled()) {
+                logger.trace("Delete from target table, sql: {}", sql);
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * 获取目标字段类型 — lazily loads the target table's column JDBC types by
+     * querying an empty result set ("WHERE 1=2") and caches them.
+     *
+     * @param conn sql connection
+     * @param config 映射配置 mapping configuration
+     * @return column name (lower case) -> java.sql.Types code
+     */
+    private Map<String, Integer> getTargetColumnType(Connection conn, MappingConfig config) {
+        DbMapping dbMapping = config.getDbMapping();
+        String cacheKey = config.getDestination() + "." + dbMapping.getDatabase() + "." + dbMapping.getTable();
+        Map<String, Integer> columnType = COLUMNS_TYPE_CACHE.get(cacheKey);
+        if (columnType == null) {
+            // Double-checked: only one thread pays the metadata query
+            synchronized (RdbSyncService.class) {
+                columnType = COLUMNS_TYPE_CACHE.get(cacheKey);
+                if (columnType == null) {
+                    columnType = new LinkedHashMap<>();
+                    final Map<String, Integer> columnTypeTmp = columnType;
+                    String sql = "SELECT * FROM " + dbMapping.getTargetTable() + " WHERE 1=2";
+                    Util.sqlRS(conn, sql, rs -> {
+                        try {
+                            ResultSetMetaData rsd = rs.getMetaData();
+                            int columnCount = rsd.getColumnCount();
+                            for (int i = 1; i <= columnCount; i++) {
+                                columnTypeTmp.put(rsd.getColumnName(i).toLowerCase(), rsd.getColumnType(i));
+                            }
+                            COLUMNS_TYPE_CACHE.put(cacheKey, columnTypeTmp);
+                        } catch (SQLException e) {
+                            logger.error(e.getMessage(), e);
+                        }
+                    });
+                }
+            }
+        }
+        return columnType;
+    }
+
+    /**
+     * 拼接主键 where条件 — appends "pk1=? AND pk2=?" and records the bound values.
+     */
+    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
+                                 List<Map<String, ?>> values, Map<String, Object> d) {
+        appendCondition(dbMapping, sql, ctype, values, d, null);
+    }
+
+    private void appendCondition(MappingConfig.DbMapping dbMapping, StringBuilder sql, Map<String, Integer> ctype,
+                                 List<Map<String, ?>> values, Map<String, Object> d, Map<String, Object> o) {
+        // 拼接主键
+        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
+            String targetColumnName = entry.getKey();
+            String srcColumnName = entry.getValue();
+            if (srcColumnName == null) {
+                srcColumnName = targetColumnName;
+            }
+            sql.append(targetColumnName).append("=? AND ");
+            Integer type = ctype.get(targetColumnName.toLowerCase());
+            // 如果有修改主键的情况 (PK itself modified: match on the OLD value)
+            if (o != null && o.containsKey(srcColumnName)) {
+                BatchExecutor.setValue(values, type, o.get(srcColumnName));
+            } else {
+                BatchExecutor.setValue(values, type, d.get(srcColumnName));
+            }
+        }
+        // Drop the trailing " AND"
+        int len = sql.length();
+        sql.delete(len - 4, len);
+    }
+
+    /** A (config, single-row DML) pair queued on a partition. */
+    private static class SyncItem {
+
+        private MappingConfig config;
+        private SingleDml     singleDml;
+
+        private SyncItem(MappingConfig config, SingleDml singleDml){
+            this.config = config;
+            this.singleDml = singleDml;
+        }
+    }
+
+    /**
+     * 取主键hash — maps a row's primary-key values to a partition in [0, threads).
+     */
+    private int pkHash(DbMapping dbMapping, Map<String, Object> d) {
+        return pkHash(dbMapping, d, null);
+    }
+
+    private int pkHash(DbMapping dbMapping, Map<String, Object> d, Map<String, Object> o) {
+        int hash = 0;
+        // 取主键 — sum the PK value hashes (old value wins if the PK changed)
+        for (Map.Entry<String, String> entry : dbMapping.getTargetPk().entrySet()) {
+            String targetColumnName = entry.getKey();
+            String srcColumnName = entry.getValue();
+            if (srcColumnName == null) {
+                srcColumnName = targetColumnName;
+            }
+            Object value;
+            if (o != null && o.containsKey(srcColumnName)) {
+                value = o.get(srcColumnName);
+            } else {
+                value = d.get(srcColumnName);
+            }
+            if (value != null) {
+                hash += value.hashCode();
+            }
+        }
+        // Double abs guards the Integer.MIN_VALUE edge of Math.abs
+        hash = Math.abs(hash) % threads;
+        return Math.abs(hash);
+    }
+
+    /** Releases per-partition connections and executors. */
+    public void close() {
+        for (int i = 0; i < threads; i++) {
+            batchExecutors[i].close();
+            executorThreads[i].shutdown();
+        }
+    }
+}

+ 91 - 0
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/support/BatchExecutor.java

@@ -0,0 +1,91 @@
+package com.alibaba.otter.canal.client.adapter.rdb.support;
+
+import java.io.Closeable;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Thin wrapper around a JDBC connection running in manual-commit mode: callers
+ * queue parameterized statements via {@link #execute} and flush them with
+ * {@link #commit()}.
+ */
+public class BatchExecutor implements Closeable {
+
+    private static final Logger logger = LoggerFactory.getLogger(BatchExecutor.class);
+
+    private Integer             key;
+    private Connection          conn;
+    // Number of statements executed since the last commit
+    private AtomicInteger       idx    = new AtomicInteger(0);
+
+    public BatchExecutor(Connection conn){
+        this(1, conn);
+    }
+
+    public BatchExecutor(Integer key, Connection conn){
+        this.key = key;
+        this.conn = conn;
+        try {
+            // Commits are issued explicitly by commit()
+            this.conn.setAutoCommit(false);
+        } catch (SQLException e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    public Integer getKey() {
+        return key;
+    }
+
+    public Connection getConn() {
+        return conn;
+    }
+
+    /**
+     * Appends a {type, value} parameter entry to the bind list consumed by
+     * {@link #execute}.
+     */
+    public static void setValue(List<Map<String, ?>> values, int type, Object value) {
+        Map<String, Object> item = new HashMap<>();
+        item.put("type", type);
+        item.put("value", value);
+        values.add(item);
+    }
+
+    /**
+     * Prepares and executes one statement with the given bind list; errors are
+     * logged, not rethrown.
+     */
+    public void execute(String sql, List<Map<String, ?>> values) {
+        try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
+            int pos = 1;
+            for (Map<String, ?> item : values) {
+                SyncUtil.setPStmt((Integer) item.get("type"), pstmt, item.get("value"), pos++);
+            }
+            pstmt.execute();
+            idx.incrementAndGet();
+        } catch (SQLException e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    /** Commits everything executed since the last commit and resets the counter. */
+    public void commit() {
+        try {
+            conn.commit();
+            if (logger.isTraceEnabled()) {
+                logger.trace("Batch executor: " + key + " commit " + idx.get() + " rows");
+            }
+            idx.set(0);
+        } catch (SQLException e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+    @Override
+    public void close() {
+        if (conn == null) {
+            return;
+        }
+        try {
+            conn.close();
+        } catch (SQLException e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+}

+ 83 - 0
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/support/SingleDml.java

@@ -0,0 +1,83 @@
+package com.alibaba.otter.canal.client.adapter.rdb.support;
+
+import com.alibaba.otter.canal.client.adapter.support.Dml;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public class SingleDml {
+
+    private String              destination;
+    private String              database;
+    private String              table;
+    private String              type;
+    private Map<String, Object> data;
+    private Map<String, Object> old;
+
+    public String getDestination() {
+        return destination;
+    }
+
+    public void setDestination(String destination) {
+        this.destination = destination;
+    }
+
+    public String getDatabase() {
+        return database;
+    }
+
+    public void setDatabase(String database) {
+        this.database = database;
+    }
+
+    public String getTable() {
+        return table;
+    }
+
+    public void setTable(String table) {
+        this.table = table;
+    }
+
+    public String getType() {
+        return type;
+    }
+
+    public void setType(String type) {
+        this.type = type;
+    }
+
+    public Map<String, Object> getData() {
+        return data;
+    }
+
+    public void setData(Map<String, Object> data) {
+        this.data = data;
+    }
+
+    public Map<String, Object> getOld() {
+        return old;
+    }
+
+    public void setOld(Map<String, Object> old) {
+        this.old = old;
+    }
+
+    public static List<SingleDml> dml2SingleDmls(Dml dml) {
+        int size = dml.getData().size();
+        List<SingleDml> singleDmls = new ArrayList<>(size);
+        for (int i = 0; i < size; i++) {
+            SingleDml singleDml = new SingleDml();
+            singleDml.setDestination(dml.getDestination());
+            singleDml.setDatabase(dml.getDatabase());
+            singleDml.setTable(dml.getTable());
+            singleDml.setType(dml.getType());
+            singleDml.setData(dml.getData().get(i));
+            if (dml.getOld() != null) {
+                singleDml.setOld(dml.getOld().get(i));
+            }
+            singleDmls.add(singleDml);
+        }
+        return singleDmls;
+    }
+}

+ 252 - 0
client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/support/SyncUtil.java

@@ -0,0 +1,252 @@
+package com.alibaba.otter.canal.client.adapter.rdb.support;
+
+import java.io.Reader;
+import java.io.StringReader;
+import java.math.BigDecimal;
+import java.nio.charset.StandardCharsets;
+import java.sql.*;
+import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.joda.time.DateTime;
+
+import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig;
+
+public class SyncUtil {
+
+    public static Map<String, String> getColumnsMap(MappingConfig.DbMapping dbMapping, Map<String, Object> data) {
+        return getColumnsMap(dbMapping, data.keySet());
+    }
+
+    public static Map<String, String> getColumnsMap(MappingConfig.DbMapping dbMapping, Collection<String> columns) {
+        Map<String, String> columnsMap;
+        if (dbMapping.isMapAll()) {
+            columnsMap = new LinkedHashMap<>();
+            for (String srcColumn : columns) {
+                boolean flag = true;
+                if (dbMapping.getTargetColumns() != null) {
+                    for (Map.Entry<String, String> entry : dbMapping.getTargetColumns().entrySet()) {
+                        if (srcColumn.equals(entry.getValue())) {
+                            columnsMap.put(entry.getKey(), srcColumn);
+                            flag = false;
+                            break;
+                        }
+                    }
+                }
+                if (flag) {
+                    columnsMap.put(srcColumn, srcColumn);
+                }
+            }
+        } else {
+            columnsMap = dbMapping.getTargetColumns();
+        }
+        return columnsMap;
+    }
+
+    /**
+     * 设置 preparedStatement
+     *
+     * @param type sqlType
+     * @param pstmt 需要设置的preparedStatement
+     * @param value 值
+     * @param i 索引号
+     */
+    public static void setPStmt(int type, PreparedStatement pstmt, Object value, int i) throws SQLException {
+        switch (type) {
+            case Types.BIT:
+            case Types.BOOLEAN:
+                if (value instanceof Boolean) {
+                    pstmt.setBoolean(i, (Boolean) value);
+                } else if (value instanceof String) {
+                    boolean v = !value.equals("0");
+                    pstmt.setBoolean(i, v);
+                } else if (value instanceof Number) {
+                    boolean v = ((Number) value).intValue() != 0;
+                    pstmt.setBoolean(i, v);
+                } else {
+                    pstmt.setNull(i, type);
+                }
+                break;
+            case Types.CHAR:
+            case Types.NCHAR:
+            case Types.VARCHAR:
+            case Types.LONGVARCHAR:
+                if (value instanceof String) {
+                    pstmt.setString(i, (String) value);
+                } else {
+                    pstmt.setString(i, value.toString());
+                }
+                break;
+            case Types.TINYINT:
+                if (value instanceof Byte || value instanceof Short || value instanceof Integer) {
+                    pstmt.setByte(i, (byte) value);
+                } else if (value instanceof Number) {
+                    pstmt.setByte(i, ((Number) value).byteValue());
+                } else if (value instanceof String) {
+                    pstmt.setByte(i, Byte.parseByte((String) value));
+                } else {
+                    pstmt.setNull(i, type);
+                }
+                break;
+            case Types.SMALLINT:
+                if (value instanceof Byte || value instanceof Short || value instanceof Integer) {
+                    pstmt.setShort(i, (short) value);
+                } else if (value instanceof Number) {
+                    pstmt.setShort(i, ((Number) value).shortValue());
+                } else if (value instanceof String) {
+                    pstmt.setShort(i, Short.parseShort((String) value));
+                } else {
+                    pstmt.setNull(i, type);
+                }
+                break;
+            case Types.INTEGER:
+                if (value instanceof Byte || value instanceof Short || value instanceof Integer
+                    || value instanceof Long) {
+                    pstmt.setInt(i, (int) value);
+                } else if (value instanceof Number) {
+                    pstmt.setInt(i, ((Number) value).intValue());
+                } else if (value instanceof String) {
+                    pstmt.setInt(i, Integer.parseInt((String) value));
+                } else {
+                    pstmt.setNull(i, type);
+                }
+                break;
+            case Types.BIGINT:
+                if (value instanceof Byte || value instanceof Short || value instanceof Integer
+                    || value instanceof Long) {
+                    pstmt.setLong(i, (long) value);
+                } else if (value instanceof Number) {
+                    pstmt.setLong(i, ((Number) value).longValue());
+                } else if (value instanceof String) {
+                    pstmt.setLong(i, Long.parseLong((String) value));
+                } else {
+                    pstmt.setNull(i, type);
+                }
+                break;
+            case Types.DECIMAL:
+            case Types.NUMERIC:
+                if (value instanceof BigDecimal) {
+                    pstmt.setBigDecimal(i, (BigDecimal) value);
+                } else if (value instanceof Byte) {
+                    pstmt.setInt(i, (int) value);
+                } else if (value instanceof Short) {
+                    pstmt.setInt(i, (int) value);
+                } else if (value instanceof Integer) {
+                    pstmt.setInt(i, (int) value);
+                } else if (value instanceof Long) {
+                    pstmt.setLong(i, (long) value);
+                } else if (value instanceof Float) {
+                    pstmt.setBigDecimal(i, new BigDecimal((float) value));
+                } else if (value instanceof Double) {
+                    pstmt.setBigDecimal(i, new BigDecimal((double) value));
+                } else {
+                    pstmt.setBigDecimal(i, new BigDecimal(value.toString()));
+                }
+                break;
+            case Types.REAL:
+                if (value instanceof Byte || value instanceof Short || value instanceof Integer || value instanceof Long
+                    || value instanceof Float || value instanceof Double) {
+                    pstmt.setFloat(i, (float) value);
+                } else if (value instanceof Number) {
+                    pstmt.setFloat(i, ((Number) value).floatValue());
+                } else if (value instanceof String) {
+                    pstmt.setFloat(i, Float.parseFloat((String) value));
+                } else {
+                    pstmt.setNull(i, type);
+                }
+                break;
+            case Types.FLOAT:
+            case Types.DOUBLE:
+                if (value instanceof Byte || value instanceof Short || value instanceof Integer || value instanceof Long
+                    || value instanceof Float || value instanceof Double) {
+                    pstmt.setDouble(i, (double) value);
+                } else if (value instanceof Number) {
+                    pstmt.setDouble(i, ((Number) value).doubleValue());
+                } else if (value instanceof String) {
+                    pstmt.setDouble(i, Double.parseDouble((String) value));
+                } else {
+                    pstmt.setNull(i, type);
+                }
+                break;
+            case Types.BINARY:
+            case Types.VARBINARY:
+            case Types.LONGVARBINARY:
+            case Types.BLOB:
+                if (value instanceof Blob) {
+                    pstmt.setBlob(i, (Blob) value);
+                } else if (value instanceof byte[]) {
+                    pstmt.setBytes(i, (byte[]) value);
+                } else if (value instanceof String) {
+                    pstmt.setBytes(i, ((String) value).getBytes(StandardCharsets.ISO_8859_1));
+                } else {
+                    pstmt.setNull(i, type);
+                }
+                break;
+            case Types.CLOB:
+                if (value instanceof Clob) {
+                    pstmt.setClob(i, (Clob) value);
+                } else if (value instanceof byte[]) {
+                    pstmt.setBytes(i, (byte[]) value);
+                } else if (value instanceof String) {
+                    Reader clobReader = new StringReader((String) value);
+                    pstmt.setCharacterStream(i, clobReader);
+                } else {
+                    pstmt.setNull(i, type);
+                }
+                break;
+            case Types.DATE:
+                if (value instanceof java.sql.Date) {
+                    pstmt.setDate(i, (java.sql.Date) value);
+                } else if (value instanceof java.util.Date) {
+                    pstmt.setDate(i, new java.sql.Date(((java.util.Date) value).getTime()));
+                } else if (value instanceof String) {
+                    String v = (String) value;
+                    if (!v.startsWith("0000-00-00")) {
+                        v = v.trim().replace(" ", "T");
+                        DateTime dt = new DateTime(v);
+                        pstmt.setDate(i, new Date(dt.toDate().getTime()));
+                    } else {
+                        pstmt.setNull(i, type);
+                    }
+                } else {
+                    pstmt.setNull(i, type);
+                }
+                break;
+            case Types.TIME:
+                if (value instanceof java.sql.Time) {
+                    pstmt.setTime(i, (java.sql.Time) value);
+                } else if (value instanceof java.util.Date) {
+                    pstmt.setTime(i, new java.sql.Time(((java.util.Date) value).getTime()));
+                } else if (value instanceof String) {
+                    String v = (String) value;
+                    v = "T" + v;
+                    DateTime dt = new DateTime(v);
+                    pstmt.setTime(i, new Time(dt.toDate().getTime()));
+                } else {
+                    pstmt.setNull(i, type);
+                }
+                break;
+            case Types.TIMESTAMP:
+                if (value instanceof java.sql.Timestamp) {
+                    pstmt.setTimestamp(i, (java.sql.Timestamp) value);
+                } else if (value instanceof java.util.Date) {
+                    pstmt.setTimestamp(i, new java.sql.Timestamp(((java.util.Date) value).getTime()));
+                } else if (value instanceof String) {
+                    String v = (String) value;
+                    if (!v.startsWith("0000-00-00")) {
+                        v = v.trim().replace(" ", "T");
+                        DateTime dt = new DateTime(v);
+                        pstmt.setTimestamp(i, new Timestamp(dt.toDate().getTime()));
+                    } else {
+                        pstmt.setNull(i, type);
+                    }
+                } else {
+                    pstmt.setNull(i, type);
+                }
+                break;
+            default:
+                pstmt.setObject(i, value, type);
+        }
+    }
+}

+ 1 - 0
client-adapter/rdb/src/main/resources/META-INF/canal/com.alibaba.otter.canal.client.adapter.OuterAdapter

@@ -0,0 +1 @@
+rdb=com.alibaba.otter.canal.client.adapter.rdb.RdbAdapter

Nem az összes módosított fájl került megjelenítésre, mert túl sok fájl változott