add:
dataX module added

18339543638 · 1 year ago
parent commit b5a38702d5
100 changed files with 11,947 additions and 0 deletions
  1. 34 0
      tr-dependencies/pom.xml
  2. 12 0
      tr-framework/src/main/java/cn/tr/core/exception/TRExcCode.java
  3. 1 0
      tr-modules/pom.xml
  4. 21 0
      tr-modules/tr-module-dataX/pom.xml
  5. 1 0
      tr-plugins/pom.xml
  6. 368 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-reader/doc/mysqlreader.md
  7. 21 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-reader/pom.xml
  8. 97 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-reader/src/main/java/cn/tr/plugin/dataX/reader/mysqlreader/MysqlReader.java
  9. 31 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-reader/src/main/java/cn/tr/plugin/dataX/reader/mysqlreader/MysqlReaderErrorCode.java
  10. 362 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-writer/doc/mysqlwriter.md
  11. 21 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-writer/pom.xml
  12. 102 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-writer/src/main/java/cn/tr/plugin/dataX/writer/mysqlwriter/MysqlWriter.java
  13. 60 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/pom.xml
  14. 25 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/base/BaseObject.java
  15. 9 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/constant/CommonConstant.java
  16. 20 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/constant/PluginType.java
  17. 122 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/BoolColumn.java
  18. 90 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/BytesColumn.java
  19. 77 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/Column.java
  20. 215 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/ColumnCast.java
  21. 136 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/DateColumn.java
  22. 167 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/DoubleColumn.java
  23. 140 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/LongColumn.java
  24. 62 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/OverFlowUtil.java
  25. 29 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/Record.java
  26. 173 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/StringColumn.java
  27. 72 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/exception/DataXException.java
  28. 15 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/exception/ExceptionTracker.java
  29. 25 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/AbstractJobPlugin.java
  30. 99 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/AbstractPlugin.java
  31. 37 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/AbstractTaskPlugin.java
  32. 22 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/JobPluginCollector.java
  33. 9 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/PluginCollector.java
  34. 30 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/Pluginable.java
  35. 26 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/RecordReceiver.java
  36. 32 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/RecordSender.java
  37. 57 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/TaskPluginCollector.java
  38. 27 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/spi/Hook.java
  39. 51 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/spi/Reader.java
  40. 40 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/spi/Writer.java
  41. 277 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/statistics/PerfRecord.java
  42. 906 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/statistics/PerfTrace.java
  43. 412 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/statistics/VMInfo.java
  44. 1073 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/Configuration.java
  45. 37 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/ConfigurationUtil.java
  46. 228 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/DESCipher.java
  47. 33 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/DataXCaseEnvUtil.java
  48. 53 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/FilterUtil.java
  49. 61 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/IdAndKeyRollingUtil.java
  50. 161 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/ListUtil.java
  51. 54 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/LocalStrings.properties
  52. 53 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/LocalStrings_en_US.properties
  53. 53 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/LocalStrings_ja_JP.properties
  54. 54 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/LocalStrings_zh_CN.properties
  55. 104 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/LocalStrings_zh_HK.properties
  56. 104 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/LocalStrings_zh_TW.properties
  57. 203 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/MessageSource.java
  58. 230 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/RangeSplitUtil.java
  59. 208 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/RetryUtil.java
  60. 110 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/StrUtil.java
  61. 35 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/AbstractContainer.java
  62. 227 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/Engine.java
  63. 5 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/LocalStrings.properties
  64. 5 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/LocalStrings_en_US.properties
  65. 5 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/LocalStrings_ja_JP.properties
  66. 5 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/LocalStrings_zh_CN.properties
  67. 10 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/LocalStrings_zh_HK.properties
  68. 10 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/LocalStrings_zh_TW.properties
  69. 91 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/container/util/HookInvoker.java
  70. 178 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/container/util/JobAssignUtil.java
  71. 975 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/job/JobContainer.java
  72. 22 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/job/meta/ExecuteMode.java
  73. 32 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/job/meta/State.java
  74. 135 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/job/scheduler/AbstractScheduler.java
  75. 60 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/job/scheduler/processinner/ProcessInnerScheduler.java
  76. 19 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/job/scheduler/processinner/StandAloneScheduler.java
  77. 281 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/communication/Communication.java
  78. 285 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/communication/CommunicationTool.java
  79. 62 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/communication/LocalTGCommunicationManager.java
  80. 68 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/collector/AbstractCollector.java
  81. 17 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/collector/ProcessInnerCollector.java
  82. 88 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/communicator/AbstractContainerCommunicator.java
  83. 63 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/communicator/job/StandAloneJobContainerCommunicator.java
  84. 74 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/communicator/taskgroup/AbstractTGContainerCommunicator.java
  85. 19 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/communicator/taskgroup/StandaloneTGContainerCommunicator.java
  86. 11 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/report/AbstractReporter.java
  87. 18 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/report/ProcessInnerReporter.java
  88. 31 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/plugin/DefaultJobPluginCollector.java
  89. 76 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/plugin/task/AbstractTaskPluginCollector.java
  90. 24 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/plugin/task/HttpPluginCollector.java
  91. 73 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/plugin/task/StdoutPluginCollector.java
  92. 171 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/plugin/task/util/DirtyRecord.java
  93. 567 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/taskgroup/TaskGroupContainer.java
  94. 114 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/taskgroup/TaskMonitor.java
  95. 115 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/taskgroup/runner/AbstractRunner.java
  96. 96 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/taskgroup/runner/ReaderRunner.java
  97. 44 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/taskgroup/runner/TaskGroupContainerRunner.java
  98. 91 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/taskgroup/runner/WriterRunner.java
  99. 248 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/transport/channel/Channel.java
  100. 145 0
      tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/transport/channel/memory/MemoryChannel.java

+ 34 - 0
tr-dependencies/pom.xml

@@ -76,6 +76,12 @@
         <easy-excel.version>3.3.1</easy-excel.version>
 
         <flyway.version>8.5.13</flyway.version>
+
+        <httpclient.version>4.5.14</httpclient.version>
+
+        <commons-lang3.version>3.12.0</commons-lang3.version>
+
+        <commons-cli.version>1.5.0</commons-cli.version>
     </properties>
 
 
@@ -403,6 +409,14 @@
                 <version>${revision}</version>
             </dependency>
 
+
+            <!-- DataX data-integration plugin -->
+            <dependency>
+                <groupId>cn.tr</groupId>
+                <artifactId>tr-spring-boot-starter-plugin-dataX</artifactId>
+                <version>${revision}</version>
+            </dependency>
+
             <!-- sensitive-word plugin -->
             <dependency>
                 <groupId>cn.tr</groupId>
@@ -482,6 +496,26 @@
                 <artifactId>flyway-mysql</artifactId>
                 <version>${flyway.version}</version>
             </dependency>
+
+
+
+            <dependency>
+                <groupId>org.apache.httpcomponents</groupId>
+                <artifactId>httpclient</artifactId>
+                <version>${httpclient.version}</version>
+            </dependency>
+
+            <dependency>
+                <groupId>org.apache.commons</groupId>
+                <artifactId>commons-lang3</artifactId>
+                <version>${commons-lang3.version}</version>
+            </dependency>
+
+            <dependency>
+                <groupId>commons-cli</groupId>
+                <artifactId>commons-cli</artifactId>
+                <version>${commons-cli.version}</version>
+            </dependency>
         </dependencies>
     </dependencyManagement>
 </project>

+ 12 - 0
tr-framework/src/main/java/cn/tr/core/exception/TRExcCode.java

@@ -206,6 +206,18 @@ public enum TRExcCode implements BaseCode {
     SERVICE_ERROR_C0501("C0501", "短信提醒服务失败"),
     SERVICE_ERROR_C0502("C0502", "语音提醒服务失败"),
     SERVICE_ERROR_C0503("C0503", "邮件提醒服务失败"),
+
+
+    //DataX
+    CONFIG_ERROR("Common-00", "您提供的配置文件存在错误信息,请检查您的作业配置 ."),
+    CONVERT_NOT_SUPPORT("Common-01", "同步数据出现业务脏数据情况,数据类型转换错误 ."),
+    CONVERT_OVER_FLOW("Common-02", "同步数据出现业务脏数据情况,数据类型转换溢出 ."),
+    RETRY_FAIL("Common-10", "方法调用多次仍旧失败 ."),
+    RUNTIME_ERROR("Common-11", "运行时内部调用错误 ."),
+    HOOK_INTERNAL_ERROR("Common-12", "Hook运行错误 ."),
+    SHUT_DOWN_TASK("Common-20", "Task收到了shutdown指令,为failover做准备"),
+    WAIT_TIME_EXCEED("Common-21", "等待时间超出范围"),
+    TASK_HUNG_EXPIRED("Common-22", "任务hung住,Expired"),
     ;
     @Getter
     private String errCode;

+ 1 - 0
tr-modules/pom.xml

@@ -17,6 +17,7 @@
         <module>tr-module-gen</module>
         <module>tr-module-export</module>
         <module>tr-module-quartz</module>
+        <module>tr-module-dataX</module>
     </modules>
 
 

+ 21 - 0
tr-modules/tr-module-dataX/pom.xml

@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>tr-modules</artifactId>
+        <groupId>cn.tr</groupId>
+        <version>0.0.9</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>tr-module-dataX</artifactId>
+
+
+    <dependencies>
+        <dependency>
+            <groupId>cn.tr</groupId>
+            <artifactId>tr-spring-boot-starter-plugin-dataX</artifactId>
+        </dependency>
+    </dependencies>
+</project>

+ 1 - 0
tr-plugins/pom.xml

@@ -32,6 +32,7 @@
         <module>tr-spring-boot-starter-plugin-biz-constant</module>
         <module>tr-spring-boot-starter-plugin-numbering-strategy</module>
         <module>tr-spring-boot-starter-plugin-lock</module>
+        <module>tr-spring-boot-starter-plugin-dataX</module>
     </modules>
 
 </project>

+ 368 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-reader/doc/mysqlreader.md

@@ -0,0 +1,368 @@
+
+# MysqlReader Plugin Documentation
+
+
+___
+
+
+
+## 1 Quick Introduction
+
+The MysqlReader plugin reads data from MySQL. Under the hood, MysqlReader connects to the remote MySQL database over JDBC and runs the appropriate SQL statements to SELECT the data out of the MySQL instance.
+
+**Unlike the readers for other relational databases, MysqlReader does not expose FetchSize.**
+
+## 2 How It Works
+
+In short, MysqlReader connects to the remote MySQL database through a JDBC connector, generates a SELECT statement from the user configuration, sends it to the remote MySQL database, assembles the query result into an abstract data set using DataX's own column types, and hands it to the downstream Writer.
+
+When the user configures table, column and where, MysqlReader stitches them into a SQL statement and sends it to MySQL; when the user configures querySql, MysqlReader sends that statement to MySQL as-is.
+
+
+## 3 Feature Description
+
+### 3.1 Sample Configuration
+
+* A job that extracts data from a MySQL database and syncs it to the local stream writer:
+
+```
+{
+    "job": {
+        "setting": {
+            "speed": {
+                 "channel": 3
+            },
+            "errorLimit": {
+                "record": 0,
+                "percentage": 0.02
+            }
+        },
+        "content": [
+            {
+                "reader": {
+                    "name": "mysqlreader",
+                    "parameter": {
+                        "username": "root",
+                        "password": "root",
+                        "column": [
+                            "id",
+                            "name"
+                        ],
+                        "splitPk": "db_id",
+                        "connection": [
+                            {
+                                "table": [
+                                    "table"
+                                ],
+                                "jdbcUrl": [
+     "jdbc:mysql://127.0.0.1:3306/database"
+                                ]
+                            }
+                        ]
+                    }
+                },
+               "writer": {
+                    "name": "streamwriter",
+                    "parameter": {
+                        "print":true
+                    }
+                }
+            }
+        ]
+    }
+}
+
+```
+
+* A job that syncs data to the local stream writer using a custom SQL query:
+
+```
+{
+    "job": {
+        "setting": {
+            "speed": {
+                 "channel":1
+            }
+        },
+        "content": [
+            {
+                "reader": {
+                    "name": "mysqlreader",
+                    "parameter": {
+                        "username": "root",
+                        "password": "root",
+                        "connection": [
+                            {
+                                "querySql": [
+                                    "select db_id,on_line_flag from db_info where db_id < 10;"
+                                ],
+                                "jdbcUrl": [
+                                    "jdbc:mysql://bad_ip:3306/database",
+                                    "jdbc:mysql://127.0.0.1:bad_port/database",
+                                    "jdbc:mysql://127.0.0.1:3306/database"
+                                ]
+                            }
+                        ]
+                    }
+                },
+                "writer": {
+                    "name": "streamwriter",
+                    "parameter": {
+                        "print": false,
+                        "encoding": "UTF-8"
+                    }
+                }
+            }
+        ]
+    }
+}
+```
+
+
+### 3.2 Parameter Description
+
+* **jdbcUrl**
+
+	* Description: JDBC connection information for the source database, given as a JSON array; one logical database may list several connection URLs. The array form exists because, inside Alibaba Group, multiple IPs can be probed: if several URLs are configured, MysqlReader tries them in order until it finds one that is reachable, and reports an error only if none can be connected. Note that jdbcUrl must be nested inside a connection element. Outside that setup, a single JDBC URL in the array is enough.
+
+		The jdbcUrl follows the official MySQL format and may carry extra connection properties. See the [MySQL Connector/J documentation](http://dev.mysql.com/doc/connector-j/en/connector-j-reference-configuration-properties.html) for details.
+
+	* Required: yes <br />
+
+	* Default: none <br />
+
+* **username**
+
+	* Description: user name for the data source <br />
+
+	* Required: yes <br />
+
+	* Default: none <br />
+
+* **password**
+
+	* Description: password for the given data source user <br />
+
+	* Required: yes <br />
+
+	* Default: none <br />
+
+* **table**
+
+	* Description: the table(s) to synchronize, given as a JSON array, so several tables can be extracted at once. When multiple tables are configured, the user must make sure they share the same schema; MysqlReader does not verify that they form one logical table. Note that table must be nested inside a connection element.<br />
+
+	* Required: yes <br />
+
+	* Default: none <br />
+
+* **column**
+
+	* Description: the set of columns to synchronize from the configured table, given as a JSON array. Use \* to select all columns, e.g. ['\*'].
+
+	  Column pruning is supported: only a subset of the columns may be exported.
+
+	  Column reordering is supported: columns do not have to be exported in schema order.
+
+	  Constants are supported and must follow MySQL SQL syntax, e.g.:
+	  ["id", "\`table\`", "1", "'bazhen.csy'", "null", "to_char(a + 1)", "2.3" , "true"]
+	  where id is an ordinary column name, \`table\` is a column name that happens to be a reserved word, 1 is an integer constant, 'bazhen.csy' is a string constant, null is a null value, to_char(a + 1) is an expression, 2.3 is a floating-point constant and true is a boolean constant. (See the combined example after this parameter list.)
+
+	* Required: yes <br />
+
+	* Default: none <br />
+
+* **splitPk**
+
+	* Description: if splitPk is set, MysqlReader shards the data on that column and launches concurrent tasks, which can greatly improve synchronization throughput.
+
+	  It is recommended to use the table's primary key as splitPk, since primary keys are usually evenly distributed and the resulting shards are unlikely to contain hot spots.
+
+	  Currently splitPk only supports integer columns; `floating-point, string, date and other types are not supported`. If an unsupported type is configured, MysqlReader reports an error!
+
+	  If splitPk is omitted or left empty, DataX synchronizes the table through a single channel.
+
+	* Required: no <br />
+
+	* Default: empty <br />
+
+* **where**
+
+	* Description: filter condition. MysqlReader builds the extraction SQL from the configured column, table and where values. A common business scenario is to sync only the current day's data by setting where to gmt_create > $bizdate. Note: where must be a legal WHERE clause; limit 10, for example, is not.<br />
+
+          The where condition is an effective way to do incremental business syncs. If where is not provided (no key or no value), DataX synchronizes the full data set.
+
+	* Required: no <br />
+
+	* Default: none <br />
+
+* **querySql**
+
+	* Description: for cases where a where clause is not expressive enough, the user may provide a custom filter SQL through this option. When it is set, DataX ignores the table and column options and filters data with this SQL directly, e.g. a multi-table join such as select a,b from table_a join table_b on table_a.id = table_b.id <br />
+
+	 `When querySql is configured, MysqlReader ignores the table, column and where options`; querySql takes precedence over them.
+
+	* Required: no <br />
+
+	* Default: none <br />
+
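+For illustration only (not part of the shipped samples), a reader fragment combining the parameters above might look like the following; the table, column and connection values are invented for this sketch:
+
+```json
+{
+    "reader": {
+        "name": "mysqlreader",
+        "parameter": {
+            "username": "root",
+            "password": "root",
+            "column": ["id", "name", "'const_source'"],
+            "splitPk": "id",
+            "where": "gmt_create > '2024-01-01 00:00:00'",
+            "connection": [
+                {
+                    "table": ["some_table"],
+                    "jdbcUrl": [
+                        "jdbc:mysql://127.0.0.1:3306/some_database",
+                        "jdbc:mysql://standby-host:3306/some_database"
+                    ]
+                }
+            ]
+        }
+    }
+}
+```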
+
+### 3.3 Type Conversion
+
+MysqlReader currently supports most MySQL types, but a few individual types are not supported, so please check your column types.
+
+The type mapping MysqlReader uses for MySQL is listed below:
+
+
+| DataX internal type | MySQL data type |
+| -------- | -----  |
+| Long     |int, tinyint, smallint, mediumint, int, bigint|
+| Double   |float, double, decimal|
+| String   |varchar, char, tinytext, text, mediumtext, longtext, year   |
+| Date     |date, datetime, timestamp, time    |
+| Boolean  |bit, bool   |
+| Bytes    |tinyblob, mediumblob, blob, longblob, varbinary    |
+
+
+
+Please note:
+
+* `Types other than those listed above are not supported`.
+* `tinyint(1) is treated by DataX as an integer`.
+* `year is treated by DataX as a string`.
+* `bit is undefined behaviour in DataX`.
+
+## 4 Performance Report
+
+### 4.1 Environment
+
+#### 4.1.1 Data Characteristics
+DDL of the test table:
+
+	CREATE TABLE `tc_biz_vertical_test_0000` (
+  	`biz_order_id` bigint(20) NOT NULL COMMENT 'id',
+  	`key_value` varchar(4000) NOT NULL COMMENT 'Key-value的内容',
+  	`gmt_create` datetime NOT NULL COMMENT '创建时间',
+  	`gmt_modified` datetime NOT NULL COMMENT '修改时间',
+  	`attribute_cc` int(11) DEFAULT NULL COMMENT '防止并发修改的标志',
+  	`value_type` int(11) NOT NULL DEFAULT '0' COMMENT '类型',
+  	`buyer_id` bigint(20) DEFAULT NULL COMMENT 'buyerid',
+  	`seller_id` bigint(20) DEFAULT NULL COMMENT 'seller_id',
+  	PRIMARY KEY (`biz_order_id`,`value_type`),
+  	KEY `idx_biz_vertical_gmtmodified` (`gmt_modified`)
+	) ENGINE=InnoDB DEFAULT CHARSET=gbk COMMENT='tc_biz_vertical'
+
+
+A single row looks roughly like:
+
+	biz_order_id: 888888888
+   	   key_value: ;orderIds:20148888888,2014888888813800;
+  	  gmt_create: 2011-09-24 11:07:20
+	gmt_modified: 2011-10-24 17:56:34
+	attribute_cc: 1
+  	  value_type: 3
+    	buyer_id: 8888888
+   	   seller_id: 1
+
+#### 4.1.2 Machine Specifications
+
+* Machine running DataX:
+	1. cpu: 24 cores, Intel(R) Xeon(R) CPU E5-2630 0 @ 2.30GHz
+	2. mem: 48GB
+	3. net: dual gigabit NICs
+	4. disc: DataX data never touches disk, so this is not measured
+
+* MySQL database machine:
+	1. cpu: 32 cores, Intel(R) Xeon(R) CPU E5-2650 v2 @ 2.60GHz
+	2. mem: 256GB
+	3. net: dual gigabit NICs
+	4. disc: BTWL419303E2800RGN  INTEL SSDSC2BB800G4   D2010370
+
+#### 4.1.3 DataX JVM Parameters
+
+	-Xms1024m -Xmx1024m -XX:+HeapDumpOnOutOfMemoryError
+
+
+### 4.2 Test Results
+
+#### 4.2.1 Single-table Test
+
+
+| Channels | Split by primary key | DataX speed (Rec/s) | DataX throughput (MB/s) | DataX NIC inbound traffic (MB/s) | DataX machine load | DB NIC outbound traffic (MB/s) | DB load |
+|--------|--------| --------|--------|--------|--------|--------|--------|
+|1| no | 183185 | 18.11 | 29| 0.6 | 31| 0.6 |
+|1| yes | 183185 | 18.11 | 29| 0.6 | 31| 0.6 |
+|4| no | 183185 | 18.11 | 29| 0.6 | 31| 0.6 |
+|4| yes | 329733 | 32.60 | 58| 0.8 | 60| 0.76 |
+|8| no | 183185 | 18.11 | 29| 0.6 | 31| 0.6 |
+|8| yes | 549556 | 54.33 | 115| 1.46 | 120| 0.78 |
+
+Notes:
+
+1. In this single-table test the primary key is a bigint(20) with values between 190247559466810 and 570722244711460, so the data is evenly distributed across the key range.
+2. Without primary-key splitting, adding channels does not improve speed; the result is the same as with a single channel.
+
+
+#### 4.2.2 Sharded-table Test (2 databases, 16 shard tables each, 32 shard tables in total)
+
+
+| Channels | DataX speed (Rec/s) | DataX throughput (MB/s) | DataX NIC inbound traffic (MB/s) | DataX machine load | DB NIC outbound traffic (MB/s) | DB load |
+|--------| --------|--------|--------|--------|--------|--------|
+|1| 202241 | 20.06 | 31.5| 1.0 | 32 | 1.1 |
+|4| 726358 | 72.04 | 123.9 | 3.1 | 132 | 3.6 |
+|8|1074405 | 106.56| 197 | 5.5 | 205| 5.1|
+|16| 1227892 | 121.79 | 229.2 | 8.1 | 233 | 7.3 |
+
+## 5 Constraints and Limitations
+
+### 5.1 Primary/Standby Replication Lag
+
+"Primary/standby sync" here means a MySQL primary/standby setup in which the standby continuously replays the binlog from the primary. Because replication always lags, especially under conditions such as network latency, the standby's data can differ noticeably from the primary's, so data read from the standby is not a complete point-in-time image of the current data.
+
+For this problem we provide a preSql feature; its documentation is still to be completed.
+
+### 5.2 Consistency
+
+MySQL is an RDBMS and offers strongly consistent query semantics. For example, if other writers insert data while a sync task is running, MysqlReader will not see those newly written rows, thanks to the database's snapshot semantics. See [MVCC on Wikipedia](https://en.wikipedia.org/wiki/Multiversion_concurrency_control) for background on database snapshots.
+
+That consistency property only holds for MysqlReader's single-threaded model. Because MysqlReader can extract data concurrently based on the user configuration, strict consistency cannot be guaranteed: after MysqlReader splits the data by splitPk, it starts multiple concurrent tasks which do not share a single read transaction and which run at different times. The result is therefore not a `complete`, `consistent` snapshot.
+
+A multi-threaded consistent snapshot is currently not technically feasible; it can only be approximated with engineering trade-offs. Two options, between which users can choose (a single-channel sketch follows this list):
+
+1. Synchronize with a single thread, i.e. do not shard the data. This is slower but preserves consistency well.
+
+2. Shut down all other writers so that the data is static during the sync, e.g. by locking tables or pausing standby replication. This may affect online traffic.
+
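+For illustration of option 1, a single-threaded job simply pins the channel count to 1 and omits splitPk (fragment only; the surrounding job layout is as in section 3.1):
+
+```json
+{
+    "setting": {
+        "speed": { "channel": 1 }
+    }
+}
+```
+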
+### 5.3 Database Character Encoding
+
+MySQL's encoding configuration is very flexible: encodings can be set per instance, database, table or column, and they can all differ, with precedence column > table > database > instance. We do not recommend such a mixed setup; ideally the encoding is unified at the database level, preferably as UTF-8.
+
+MysqlReader extracts data through JDBC, which handles all encodings natively and converts them under the hood, so MysqlReader does not require the user to specify an encoding; it detects and converts automatically.
+
+If data was physically written with an encoding different from the one declared, MysqlReader cannot detect this and offers no workaround; in such cases `the exported data may be garbled`.
+
+### 5.4 Incremental Synchronization
+
+MysqlReader extracts data with JDBC SELECT statements, so incremental extraction can be done with SELECT...WHERE... in several ways (see the sketch after this list):
+
+* If the online application fills a modify column with the change timestamp on every insert, update and (logical) delete, MysqlReader only needs a WHERE condition on the timestamp of the previous sync run.
+* For append-only data, MysqlReader can use a WHERE condition on the maximum auto-increment ID of the previous sync run.
+
+If the business data has no column that distinguishes new or changed rows, MysqlReader cannot do incremental sync and can only sync the full data set.
+
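+A minimal sketch of the timestamp-based approach, assuming a hypothetical gmt_modified column and a placeholder that your scheduler would substitute with the previous sync time:
+
+```json
+{
+    "reader": {
+        "name": "mysqlreader",
+        "parameter": {
+            "username": "root",
+            "password": "root",
+            "column": ["id", "name", "gmt_modified"],
+            "where": "gmt_modified > '${last_sync_time}'",
+            "connection": [
+                { "table": ["some_table"], "jdbcUrl": ["jdbc:mysql://127.0.0.1:3306/some_database"] }
+            ]
+        }
+    }
+}
+```
+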
+### 5.5 SQL Safety
+
+MysqlReader lets the user supply their own SELECT statement via querySql and performs no safety validation on it whatsoever; that responsibility lies with the DataX user.
+
+## 6 FAQ
+
+***
+
+**Q: A MysqlReader sync fails with error message XXX**
+
+ A: This is usually a network or permission problem; test with the mysql command line:
+
+    mysql -u<username> -p<password> -h<ip> -D<database> -e "select * from <table>"
+
+If this command fails too, the problem is in the environment; please contact your DBA.
+
+

+ 21 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-reader/pom.xml

@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>tr-spring-boot-starter-plugin-dataX</artifactId>
+        <groupId>cn.tr</groupId>
+        <version>0.0.9</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>mysql-reader</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>cn.tr</groupId>
+            <artifactId>tr-spring-boot-starter-plugin-dataX</artifactId>
+        </dependency>
+    </dependencies>
+
+</project>

+ 97 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-reader/src/main/java/cn/tr/plugin/dataX/reader/mysqlreader/MysqlReader.java

@@ -0,0 +1,97 @@
+package cn.tr.plugin.dataX.reader.mysqlreader;
+
+import cn.tr.plugin.dataX.common.plugin.RecordSender;
+import cn.tr.plugin.dataX.common.spi.Reader;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.rdbms.reader.CommonRdbmsReader;
+import cn.tr.plugin.dataX.rdbms.reader.Constant;
+import cn.tr.plugin.dataX.rdbms.util.DataBaseType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+public class MysqlReader extends Reader {
+
+    private static final DataBaseType DATABASE_TYPE = DataBaseType.MySql;
+
+    public static class Job extends Reader.Job {
+        private static final Logger LOG = LoggerFactory
+                .getLogger(Job.class);
+
+        private Configuration originalConfig = null;
+        private CommonRdbmsReader.Job commonRdbmsReaderJob;
+
+        @Override
+        public void init() {
+            this.originalConfig = super.getPluginJobConf();
+
+            Integer userConfigedFetchSize = this.originalConfig.getInt(Constant.FETCH_SIZE);
+            if (userConfigedFetchSize != null) {
+                LOG.warn("对 mysqlreader 不需要配置 fetchSize, mysqlreader 将会忽略这项配置. 如果您不想再看到此警告,请去除fetchSize 配置.");
+            }
+
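+            // MySQL Connector/J streams a result set row-by-row only when fetchSize is Integer.MIN_VALUE, so the value is forced here regardless of user configuration.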
+            this.originalConfig.set(Constant.FETCH_SIZE, Integer.MIN_VALUE);
+
+            this.commonRdbmsReaderJob = new CommonRdbmsReader.Job(DATABASE_TYPE);
+            this.commonRdbmsReaderJob.init(this.originalConfig);
+        }
+
+        @Override
+        public void preCheck(){
+            init();
+            this.commonRdbmsReaderJob.preCheck(this.originalConfig,DATABASE_TYPE);
+
+        }
+
+        @Override
+        public List<Configuration> split(int adviceNumber) {
+            return this.commonRdbmsReaderJob.split(this.originalConfig, adviceNumber);
+        }
+
+        @Override
+        public void post() {
+            this.commonRdbmsReaderJob.post(this.originalConfig);
+        }
+
+        @Override
+        public void destroy() {
+            this.commonRdbmsReaderJob.destroy(this.originalConfig);
+        }
+
+    }
+
+    public static class Task extends Reader.Task {
+
+        private Configuration readerSliceConfig;
+        private CommonRdbmsReader.Task commonRdbmsReaderTask;
+
+        @Override
+        public void init() {
+            this.readerSliceConfig = super.getPluginJobConf();
+            this.commonRdbmsReaderTask = new CommonRdbmsReader.Task(DATABASE_TYPE,super.getTaskGroupId(), super.getTaskId());
+            this.commonRdbmsReaderTask.init(this.readerSliceConfig);
+
+        }
+
+        @Override
+        public void startRead(RecordSender recordSender) {
+            int fetchSize = this.readerSliceConfig.getInt(Constant.FETCH_SIZE);
+
+            this.commonRdbmsReaderTask.startRead(this.readerSliceConfig, recordSender,
+                    super.getTaskPluginCollector(), fetchSize);
+        }
+
+        @Override
+        public void post() {
+            this.commonRdbmsReaderTask.post(this.readerSliceConfig);
+        }
+
+        @Override
+        public void destroy() {
+            this.commonRdbmsReaderTask.destroy(this.readerSliceConfig);
+        }
+
+    }
+
+}

+ 31 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-reader/src/main/java/cn/tr/plugin/dataX/reader/mysqlreader/MysqlReaderErrorCode.java

@@ -0,0 +1,31 @@
+package cn.tr.plugin.dataX.reader.mysqlreader;
+
+import com.alibaba.datax.common.spi.ErrorCode;
+
+public enum MysqlReaderErrorCode implements ErrorCode {
+    ;
+
+    private final String code;
+    private final String description;
+
+    private MysqlReaderErrorCode(String code, String description) {
+        this.code = code;
+        this.description = description;
+    }
+
+    @Override
+    public String getCode() {
+        return this.code;
+    }
+
+    @Override
+    public String getDescription() {
+        return this.description;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("Code:[%s], Description:[%s]. ", this.code,
+                this.description);
+    }
+}

+ 362 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-writer/doc/mysqlwriter.md

@@ -0,0 +1,362 @@
+
+# DataX MysqlWriter
+
+
+---
+
+
+## 1 Quick Introduction
+
+The MysqlWriter plugin writes data into target tables of a MySQL primary instance. Under the hood, MysqlWriter connects to the remote MySQL database over JDBC and writes data with insert into ... (or replace into ...) statements, committing in batches; the target database is expected to use the InnoDB engine.
+
+MysqlWriter is aimed at ETL engineers who use it to load data from a data warehouse into MySQL. It can also serve DBAs and other users as a data-migration tool.
+
+
+## 2 How It Works
+
+MysqlWriter receives the protocol data produced by the Reader through the DataX framework and, depending on the configured `writeMode`, generates
+
+
+* `insert into...` (rows that hit a primary-key or unique-index conflict are not written)
+
+##### or
+
+* `replace into...` (behaves like insert into when there is no primary-key or unique-index conflict; on conflict the new row replaces every field of the existing row) and writes the data to MySQL with these statements. For performance it uses `PreparedStatement + Batch` with `rewriteBatchedStatements=true`, buffering records in a thread-local buffer and only issuing the write once the buffer reaches a preset threshold.
+
+<br />
+
+    Note: the database holding the target table must be the primary instance, otherwise the write fails; the job needs at least the insert/replace into... privilege, and whether additional privileges are required depends on the statements you put in preSql and postSql.
+
+
+## 3 Feature Description
+
+### 3.1 Sample Configuration
+
+* This sample generates data in memory and imports it into MySQL.
+
+```json
+{
+    "job": {
+        "setting": {
+            "speed": {
+                "channel": 1
+            }
+        },
+        "content": [
+            {
+                 "reader": {
+                    "name": "streamreader",
+                    "parameter": {
+                        "column" : [
+                            {
+                                "value": "DataX",
+                                "type": "string"
+                            },
+                            {
+                                "value": 19880808,
+                                "type": "long"
+                            },
+                            {
+                                "value": "1988-08-08 08:08:08",
+                                "type": "date"
+                            },
+                            {
+                                "value": true,
+                                "type": "bool"
+                            },
+                            {
+                                "value": "test",
+                                "type": "bytes"
+                            }
+                        ],
+                        "sliceRecordCount": 1000
+                    }
+                },
+                "writer": {
+                    "name": "mysqlwriter",
+                    "parameter": {
+                        "writeMode": "insert",
+                        "username": "root",
+                        "password": "root",
+                        "column": [
+                            "id",
+                            "name"
+                        ],
+                        "session": [
+                        	"set session sql_mode='ANSI'"
+                        ],
+                        "preSql": [
+                            "delete from test"
+                        ],
+                        "connection": [
+                            {
+                                "jdbcUrl": "jdbc:mysql://127.0.0.1:3306/datax?useUnicode=true&characterEncoding=gbk",
+                                "table": [
+                                    "test"
+                                ]
+                            }
+                        ]
+                    }
+                }
+            }
+        ]
+    }
+}
+
+```
+
+
+### 3.2 Parameter Description
+
+* **jdbcUrl**
+
+	* Description: JDBC connection information for the target database. At runtime DataX appends the following properties to the jdbcUrl you provide: yearIsDateType=false&zeroDateTimeBehavior=convertToNull&rewriteBatchedStatements=true
+
+               Note: 1. Only one jdbcUrl may be configured per database. Unlike MysqlReader, which can probe several standby URLs, multiple primaries for the same database (dual-primary import) are not supported here.
+                    2. The jdbcUrl follows the official MySQL format and may carry extra connection properties; for example, to force the connection encoding to gbk, append useUnicode=true&characterEncoding=gbk to the jdbcUrl. See the official MySQL documentation or ask your DBA for details.
+
+
+ 	* Required: yes <br />
+
+	* Default: none <br />
+
+* **username**
+
+	* Description: user name for the target database <br />
+
+	* Required: yes <br />
+
+	* Default: none <br />
+
+* **password**
+
+	* Description: password for the target database <br />
+
+	* Required: yes <br />
+
+	* Default: none <br />
+
+* **table**
+
+	* Description: name of the target table(s). Writing to one or several tables is supported; when several tables are configured, they must all share the same schema.
+
+               Note: table and jdbcUrl must be nested inside a connection element
+
+	* Required: yes <br />
+
+	* Default: none <br />
+
+* **column**
+
+	* Description: the target-table columns to be written, separated by commas, e.g. "column": ["id","name","age"]. To write every column in order, use `*`, e.g. `"column": ["*"]`.
+
+			**The column option is mandatory and must not be left empty!**
+
+               Note: 1. We strongly discourage the `*` form, because your job may behave incorrectly or fail once the target table's columns or types change
+                    2. column must not contain constant values
+
+	* Required: yes <br />
+
+	* Default: none <br />
+
+* **session**
+
+	* Description: SQL statements that DataX executes when it obtains a MySQL connection, to adjust the session properties of that connection
+
+	* Required: no
+
+	* Default: empty
+
+* **preSql**
+
+	* Description: standard SQL statements executed before data is written to the target table. If a statement refers to the table being written, use the placeholder `@table`; it is replaced with the actual table name at execution time. For example, if the job writes to 100 identically structured shard tables on the target side (named datax_00, datax01, ... datax_98, datax_99) and you want to delete their contents before the import, configure `"preSql":["delete from @table"]`; before each table is written, the corresponding delete from <that table> is executed first (see the temporary-table sketch at the end of the FAQ for a combined preSql/postSql example). <br />
+
+	* Required: no <br />
+
+	* Default: none <br />
+
+* **postSql**
+
+	* Description: standard SQL statements executed after data has been written to the target table (same mechanism as preSql). <br />
+
+	* Required: no <br />
+
+	* Default: none <br />
+
+* **writeMode**
+
+	* Description: controls whether data is written with `insert into`, `replace into` or `ON DUPLICATE KEY UPDATE` statements<br />
+
+	* Required: yes <br />
+	
+	* Options: insert/replace/update <br />
+
+	* Default: insert <br />
+
+* **batchSize**
+
+	* Description: number of records submitted per batch. A larger value greatly reduces the number of network round trips between DataX and MySQL and improves overall throughput, but setting it too high may cause the DataX process to run out of memory (a combined fragment follows this parameter list).<br />
+
+	* Required: no <br />
+
+	* Default: 1024 <br />
+
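+For illustration only, a writer fragment combining writeMode, session, batchSize and pre/post statements might look like the following; the table and database names are invented for this sketch:
+
+```json
+{
+    "writer": {
+        "name": "mysqlwriter",
+        "parameter": {
+            "writeMode": "replace",
+            "username": "root",
+            "password": "root",
+            "column": ["id", "name"],
+            "session": ["set session sql_mode='ANSI'"],
+            "preSql": ["delete from @table"],
+            "postSql": ["select count(*) from @table"],
+            "batchSize": 2048,
+            "connection": [
+                {
+                    "jdbcUrl": "jdbc:mysql://127.0.0.1:3306/some_database?useUnicode=true&characterEncoding=utf8",
+                    "table": ["some_table"]
+                }
+            ]
+        }
+    }
+}
+```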
+
+### 3.3 Type Conversion
+
+As with MysqlReader, MysqlWriter currently supports most MySQL types, but a few individual types are not supported, so please check your column types.
+
+The type mapping MysqlWriter uses for MySQL is listed below:
+
+
+| DataX internal type | MySQL data type |
+| -------- | -----  |
+| Long     |int, tinyint, smallint, mediumint, int, bigint, year|
+| Double   |float, double, decimal|
+| String   |varchar, char, tinytext, text, mediumtext, longtext    |
+| Date     |date, datetime, timestamp, time    |
+| Boolean  |bit, bool   |
+| Bytes    |tinyblob, mediumblob, blob, longblob, varbinary    |
+
+ * `Conversion for the bit type is currently undefined`
+
+## 4 Performance Report
+
+### 4.1 Environment
+
+#### 4.1.1 Data Characteristics
+DDL of the test table:
+
+	CREATE TABLE `datax_mysqlwriter_perf_00` (
+  	`biz_order_id` bigint(20) NOT NULL AUTO_INCREMENT  COMMENT 'id',
+  	`key_value` varchar(4000) NOT NULL COMMENT 'Key-value的内容',
+  	`gmt_create` datetime NOT NULL COMMENT '创建时间',
+  	`gmt_modified` datetime NOT NULL COMMENT '修改时间',
+  	`attribute_cc` int(11) DEFAULT NULL COMMENT '防止并发修改的标志',
+  	`value_type` int(11) NOT NULL DEFAULT '0' COMMENT '类型',
+  	`buyer_id` bigint(20) DEFAULT NULL COMMENT 'buyerid',
+  	`seller_id` bigint(20) DEFAULT NULL COMMENT 'seller_id',
+  	PRIMARY KEY (`biz_order_id`,`value_type`),
+  	KEY `idx_biz_vertical_gmtmodified` (`gmt_modified`)
+	) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='datax perf test'
+
+
+A single row looks roughly like:
+
+   	   key_value: ;orderIds:20148888888,2014888888813800;
+  	  gmt_create: 2011-09-24 11:07:20
+	gmt_modified: 2011-10-24 17:56:34
+	attribute_cc: 1
+  	  value_type: 3
+    	buyer_id: 8888888
+   	   seller_id: 1
+
+#### 4.1.2 Machine Specifications
+
+* Machine running DataX:
+	1. cpu: 24 cores, Intel(R) Xeon(R) CPU E5-2630 0 @ 2.30GHz
+	2. mem: 48GB
+	3. net: dual gigabit NICs
+	4. disc: DataX data never touches disk, so this is not measured
+
+* MySQL database machine:
+	1. cpu: 32 cores, Intel(R) Xeon(R) CPU E5-2650 v2 @ 2.60GHz
+	2. mem: 256GB
+	3. net: dual gigabit NICs
+	4. disc: BTWL419303E2800RGN  INTEL SSDSC2BB800G4   D2010370
+
+#### 4.1.3 DataX JVM Parameters
+
+	-Xms1024m -Xmx1024m -XX:+HeapDumpOnOutOfMemoryError
+
+
+### 4.2 Test Results
+
+#### 4.2.1 Single-table Test
+
+
+| Channels | Batch size (rows) | DataX speed (Rec/s) | DataX throughput (MB/s) | DataX NIC outbound traffic (MB/s) | DataX machine load | DB NIC inbound traffic (MB/s) | DB load | DB TPS |
+|--------|--------| --------|--------|--------|--------|--------|--------|--------|
+|1| 128 | 5319 | 0.260 | 0.580 | 0.05 | 0.620| 0.5 | 50 |
+|1| 512 | 14285 | 0.697 | 1.6 | 0.12 | 1.6 | 0.6 | 28 |
+|1| 1024 | 17241 | 0.842  | 1.9 | 0.20 | 1.9 | 0.6 | 16|
+|1| 2048 | 31250 | 1.49 | 2.8 | 0.15 | 3.0| 0.8 | 15 |
+|1| 4096 | 31250 | 1.49 | 3.5 | 0.20 | 3.6| 0.8 | 8 |
+|4| 128 | 11764 | 0.574 | 1.5 | 0.21 | 1.6| 0.8 | 112 |
+|4| 512 | 30769 | 1.47 | 3.5 | 0.3 | 3.6 | 0.9 | 88 |
+|4| 1024 | 50000 | 2.38 | 5.4 | 0.3 | 5.5 | 1.0 | 66 |
+|4| 2048 | 66666 | 3.18 | 7.0 | 0.3 | 7.1| 1.37 | 46 |
+|4| 4096 | 80000 | 3.81 | 7.3| 0.5 | 7.3| 1.40 | 26 |
+|8| 128 | 17777 | 0.868 | 2.9 | 0.28 | 2.9| 0.8 | 200 |
+|8| 512 | 57142 | 2.72 | 8.5 | 0.5 | 8.5| 0.70 | 159 |
+|8| 1024 | 88888 | 4.24 | 12.2 | 0.9 | 12.4 | 1.0 | 108 |
+|8| 2048 | 133333 | 6.36 | 14.7 | 0.9 | 14.7 | 1.0 | 81 |
+|8| 4096 | 166666 | 7.95 | 19.5 | 0.9 | 19.5 | 3.0 | 45 |
+|16| 128 | 32000 | 1.53 | 3.3 | 0.6 | 3.4 | 0.88 | 401 |
+|16| 512 | 106666 | 5.09 | 16.1| 0.9 | 16.2 | 2.16 | 260 |
+|16| 1024 | 173913 | 8.29 | 22.1| 1.5 | 22.2 | 4.5 | 200 |
+|16| 2048 | 228571 | 10.90 | 28.6 | 1.61 | 28.7 | 4.60 | 128 |
+|16| 4096 | 246153 | 11.74 | 31.1| 1.65 | 31.2| 4.66 | 57 |
+|32| 1024 | 246153 | 11.74 | 30.5| 3.17 | 30.7 | 12.10 | 270 |
+
+
+Notes:
+
+1. In this single-table test the primary key is an auto-increment bigint(20).
+2. batchSize and the number of channels both have a large impact on performance.
+3. With 16 channels and a batch size of 4096, two full GCs were observed.
+
+
+#### 4.2.2 Sharded-table Test (2 databases, 4 shard tables each, 8 shard tables in total)
+
+
+| Channels | Batch size (rows) | DataX speed (Rec/s) | DataX throughput (MB/s) | DataX NIC outbound traffic (MB/s) | DataX machine load | DB NIC inbound traffic (MB/s) | DB load | DB TPS |
+|--------|--------| --------|--------|--------|--------|--------|--------|--------|
+|8| 128 | 26764 | 1.28 | 2.9 | 0.5 | 3.0| 0.8 | 209 |
+|8| 512 | 95180 | 4.54 | 10.5 | 0.7 | 10.9 | 0.8 | 188 |
+|8| 1024 | 94117 | 4.49  | 12.3 | 0.6 | 12.4 | 1.09 | 120 |
+|8| 2048 | 133333 | 6.36 | 19.4 | 0.9 | 19.5| 1.35 | 85 |
+|8| 4096 | 191692 | 9.14 | 22.1 | 1.0 | 22.2| 1.45 | 45 |
+
+
+#### 4.2.3 Sharded-table Test (2 databases, 8 shard tables each, 16 shard tables in total)
+
+
+| Channels | Batch size (rows) | DataX speed (Rec/s) | DataX throughput (MB/s) | DataX NIC outbound traffic (MB/s) | DataX machine load | DB NIC inbound traffic (MB/s) | DB load | DB TPS |
+|--------|--------| --------|--------|--------|--------|--------|--------|--------|
+|16| 128 | 50124 | 2.39 | 5.6 | 0.40 | 6.0| 2.42 | 378 |
+|16| 512 | 155084 | 7.40 | 18.6 | 1.30 | 18.9| 2.82 | 325 |
+|16| 1024 | 177777 | 8.48 | 24.1 | 1.43 | 25.5| 3.5 | 233 |
+|16| 2048 | 289382 | 13.8 | 33.1 | 2.5 | 33.5| 4.5 | 150 |
+|16| 4096 | 326451 | 15.52 | 33.7 | 1.5 | 33.9| 4.3 | 80 |
+
+#### 4.2.4 Performance Summary
+1. The batch size (batchSize) has a large impact on performance; once `batchSize>=512`, a single thread can write roughly ten thousand rows per second.
+2. With `batchSize>=512`, speed increases roughly linearly with the number of channels (for channel counts below 32).
+3. `It is generally not recommended to use more than 32 channels when writing to the database`
+
+
+## 5 Constraints and Limitations
+
+
+
+
+## FAQ
+
+***
+
+**Q: MysqlWriter fails while executing a postSql statement. Has the data already been imported into the target database?**
+
+A: A DataX import consists of three stages: the pre step, the import itself and the post step. If any of them fails, the DataX job fails. Because DataX cannot guarantee that these stages run inside a single transaction, data may already have landed in the target.
+
+***
+
+**Q: In that case some dirty data may have been imported; what if that affects the production database?**
+
+A: There are currently two approaches. First, configure a pre statement whose SQL cleans up the data of the current day, so that each DataX run first removes the previous attempt and then imports the complete data set. Second, import into a temporary table and rename it to the production table once the import has finished.
+
+***
+
+**Q: The second approach avoids touching production data. How exactly do I set that up?**
+
+A: Configure the import to target a temporary table; a sketch follows.
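+A minimal sketch of the temporary-table approach, assuming hypothetical table names some_table and some_table_tmp; adapt the rename statement in postSql to your own DDL conventions:
+
+```json
+{
+    "writer": {
+        "name": "mysqlwriter",
+        "parameter": {
+            "writeMode": "insert",
+            "username": "root",
+            "password": "root",
+            "column": ["id", "name"],
+            "preSql": ["truncate table some_table_tmp"],
+            "postSql": [
+                "rename table some_table to some_table_old, some_table_tmp to some_table"
+            ],
+            "connection": [
+                {
+                    "jdbcUrl": "jdbc:mysql://127.0.0.1:3306/some_database",
+                    "table": ["some_table_tmp"]
+                }
+            ]
+        }
+    }
+}
+```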

+ 21 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-writer/pom.xml

@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>tr-spring-boot-starter-plugin-dataX</artifactId>
+        <groupId>cn.tr</groupId>
+        <version>0.0.9</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>dataX-mysql-writer</artifactId>
+
+
+    <dependencies>
+        <dependency>
+            <groupId>cn.tr</groupId>
+            <artifactId>tr-spring-boot-starter-plugin-dataX</artifactId>
+        </dependency>
+    </dependencies>
+</project>

+ 102 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/mysql-writer/src/main/java/cn/tr/plugin/dataX/writer/mysqlwriter/MysqlWriter.java

@@ -0,0 +1,102 @@
+package cn.tr.plugin.dataX.writer.mysqlwriter;
+
+import cn.tr.plugin.dataX.common.plugin.RecordReceiver;
+import cn.tr.plugin.dataX.common.spi.Writer;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.rdbms.util.DataBaseType;
+import cn.tr.plugin.dataX.rdbms.writer.CommonRdbmsWriter;
+import cn.tr.plugin.dataX.rdbms.writer.Key;
+
+import java.util.List;
+
+
+//TODO writeProxy
+public class MysqlWriter extends Writer {
+    private static final DataBaseType DATABASE_TYPE = DataBaseType.MySql;
+
+    public static class Job extends Writer.Job {
+        private Configuration originalConfig = null;
+        private CommonRdbmsWriter.Job commonRdbmsWriterJob;
+
+        @Override
+        public void preCheck(){
+            this.init();
+            this.commonRdbmsWriterJob.writerPreCheck(this.originalConfig, DATABASE_TYPE);
+        }
+
+        @Override
+        public void init() {
+            this.originalConfig = super.getPluginJobConf();
+            this.commonRdbmsWriterJob = new CommonRdbmsWriter.Job(DATABASE_TYPE);
+            this.commonRdbmsWriterJob.init(this.originalConfig);
+        }
+
+        // Normally the pre step should be deferred to the task level (the single-table case is the exception)
+        @Override
+        public void prepare() {
+            // Privilege validation is not enabled for real runs yet
+            //this.commonRdbmsWriterJob.privilegeValid(this.originalConfig, DATABASE_TYPE);
+            this.commonRdbmsWriterJob.prepare(this.originalConfig);
+        }
+
+        @Override
+        public List<Configuration> split(int mandatoryNumber) {
+            return this.commonRdbmsWriterJob.split(this.originalConfig, mandatoryNumber);
+        }
+
+        // Normally the post step should be deferred to the task level (the single-table case is the exception)
+        @Override
+        public void post() {
+            this.commonRdbmsWriterJob.post(this.originalConfig);
+        }
+
+        @Override
+        public void destroy() {
+            this.commonRdbmsWriterJob.destroy(this.originalConfig);
+        }
+
+    }
+
+    public static class Task extends Writer.Task {
+        private Configuration writerSliceConfig;
+        private CommonRdbmsWriter.Task commonRdbmsWriterTask;
+
+        @Override
+        public void init() {
+            this.writerSliceConfig = super.getPluginJobConf();
+            this.commonRdbmsWriterTask = new CommonRdbmsWriter.Task(DATABASE_TYPE);
+            this.commonRdbmsWriterTask.init(this.writerSliceConfig);
+        }
+
+        @Override
+        public void prepare() {
+            this.commonRdbmsWriterTask.prepare(this.writerSliceConfig);
+        }
+
+        @Override
+        //TODO switch to a connection pool and make sure every connection obtained is usable (note: the session may need to be re-initialized for each connection)
+        public void startWrite(RecordReceiver recordReceiver) {
+            this.commonRdbmsWriterTask.startWrite(recordReceiver, this.writerSliceConfig,
+                    super.getTaskPluginCollector());
+        }
+
+        @Override
+        public void post() {
+            this.commonRdbmsWriterTask.post(this.writerSliceConfig);
+        }
+
+        @Override
+        public void destroy() {
+            this.commonRdbmsWriterTask.destroy(this.writerSliceConfig);
+        }
+
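+        // Re-executing a failed task is only safe when writes are idempotent, which holds for "replace" mode but not for plain "insert".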
+        @Override
+        public boolean supportFailOver(){
+            String writeMode = writerSliceConfig.getString(Key.WRITE_MODE);
+            return "replace".equalsIgnoreCase(writeMode);
+        }
+
+    }
+
+
+}

+ 60 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/pom.xml

@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>tr-plugins</artifactId>
+        <groupId>cn.tr</groupId>
+        <version>${revision}</version>
+    </parent>
+
+    <modelVersion>4.0.0</modelVersion>
+    <version>${revision}</version>
+    <modules>
+        <module>mysql-writer</module>
+        <module>mysql-reader</module>
+    </modules>
+    <packaging>pom</packaging>
+
+    <artifactId>tr-spring-boot-starter-plugin-dataX</artifactId>
+
+    <description>Data integration based on DataX</description>
+
+    <dependencies>
+        <dependency>
+            <groupId>cn.tr</groupId>
+            <artifactId>tr-framework</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>cn.hutool</groupId>
+            <artifactId>hutool-all</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>commons-cli</groupId>
+            <artifactId>commons-cli</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>druid-spring-boot-starter</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.groovy</groupId>
+            <artifactId>groovy</artifactId>
+            <version>4.0.3</version>
+        </dependency>
+    </dependencies>
+</project>

+ 25 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/base/BaseObject.java

@@ -0,0 +1,25 @@
+package cn.tr.plugin.dataX.common.base;
+//
+//import org.apache.commons.lang3.builder.EqualsBuilder;
+//import org.apache.commons.lang3.builder.HashCodeBuilder;
+//import org.apache.commons.lang3.builder.ToStringBuilder;
+//import org.apache.commons.lang3.builder.ToStringStyle;
+
+public class BaseObject {
+
+//	@Override
+//	public int hashCode() {
+//		return HashCodeBuilder.reflectionHashCode(this, false);
+//	}
+//
+//	@Override
+//	public boolean equals(Object object) {
+//		return EqualsBuilder.reflectionEquals(this, object, false);
+//	}
+//
+//	@Override
+//	public String toString() {
+//		return ToStringBuilder.reflectionToString(this,
+//				ToStringStyle.MULTI_LINE_STYLE);
+//	}
+}

+ 9 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/constant/CommonConstant.java

@@ -0,0 +1,9 @@
+package cn.tr.plugin.dataX.common.constant;
+
+public final class CommonConstant {
+    /**
+     * Used by a plugin to tag each task produced by its own split with the resource it uses, so that when the core pairs reader/writer tasks after splitting it can shuffle them more meaningfully by resource label
+     */
+    public static String LOAD_BALANCE_RESOURCE_MARK = "loadBalanceResourceMark";
+
+}

+ 20 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/constant/PluginType.java

@@ -0,0 +1,20 @@
+package cn.tr.plugin.dataX.common.constant;
+
+/**
+ * Created by jingxing on 14-8-31.
+ */
+public enum PluginType {
+    // pluginType also denotes the resource directory, so it is hard to extend, or rather should only be extended when truly necessary. Handler is marked for now (it is effectively the same as transformer) and will be discussed later
+    READER("reader"), TRANSFORMER("transformer"), WRITER("writer"), HANDLER("handler");
+
+    private String pluginType;
+
+    private PluginType(String pluginType) {
+        this.pluginType = pluginType;
+    }
+
+    @Override
+    public String toString() {
+        return this.pluginType;
+    }
+}

+ 122 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/BoolColumn.java

@@ -0,0 +1,122 @@
+package cn.tr.plugin.dataX.common.element;
+
+
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Date;
+
+/**
+ * Created by jingxing on 14-8-24.
+ */
+public class BoolColumn extends Column {
+
+	public BoolColumn(Boolean bool) {
+		super(bool, Column.Type.BOOL, 1);
+	}
+
+	public BoolColumn(final String data) {
+		this(true);
+		this.validate(data);
+		if (null == data) {
+			this.setRawData(null);
+			this.setByteSize(0);
+		} else {
+			this.setRawData(Boolean.valueOf(data));
+			this.setByteSize(1);
+		}
+		return;
+	}
+
+	public BoolColumn() {
+		super(null, Column.Type.BOOL, 1);
+	}
+
+	@Override
+	public Boolean asBoolean() {
+		if (null == super.getRawData()) {
+			return null;
+		}
+
+		return (Boolean) super.getRawData();
+	}
+
+	@Override
+	public Long asLong() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		return this.asBoolean() ? 1L : 0L;
+	}
+
+	@Override
+	public Double asDouble() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		return this.asBoolean() ? 1.0d : 0.0d;
+	}
+
+	@Override
+	public String asString() {
+		if (null == super.getRawData()) {
+			return null;
+		}
+
+		return this.asBoolean() ? "true" : "false";
+	}
+
+	@Override
+	public BigInteger asBigInteger() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		return BigInteger.valueOf(this.asLong());
+	}
+
+	@Override
+	public BigDecimal asBigDecimal() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		return BigDecimal.valueOf(this.asLong());
+	}
+
+	@Override
+	public Date asDate() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Bool类型不能转为Date .");
+	}
+	
+	@Override
+	public Date asDate(String dateFormat) {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Bool类型不能转为Date .");
+	}
+	
+	@Override
+	public byte[] asBytes() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Boolean类型不能转为Bytes .");
+	}
+
+	private void validate(final String data) {
+		if (null == data) {
+			return;
+		}
+
+		if ("true".equalsIgnoreCase(data) || "false".equalsIgnoreCase(data)) {
+			return;
+		}
+
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT,
+				String.format("String[%s]不能转为Bool .", data));
+	}
+}

+ 90 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/BytesColumn.java

@@ -0,0 +1,90 @@
+package cn.tr.plugin.dataX.common.element;
+
+import cn.hutool.core.util.ArrayUtil;
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Date;
+
+/**
+ * Created by jingxing on 14-8-24.
+ */
+public class BytesColumn extends Column {
+
+	public BytesColumn() {
+		this(null);
+	}
+
+	public BytesColumn(byte[] bytes) {
+		super(ArrayUtil.clone(bytes), Column.Type.BYTES, null == bytes ? 0
+				: bytes.length);
+	}
+
+	@Override
+	public byte[] asBytes() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		return (byte[]) this.getRawData();
+	}
+
+	@Override
+	public String asString() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		try {
+			return ColumnCast.bytes2String(this);
+		} catch (Exception e) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_NOT_SUPPORT,
+					String.format("Bytes[%s]不能转为String .", this.toString()));
+		}
+	}
+
+	@Override
+	public Long asLong() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为Long .");
+	}
+
+	@Override
+	public BigDecimal asBigDecimal() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为BigDecimal .");
+	}
+
+	@Override
+	public BigInteger asBigInteger() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为BigInteger .");
+	}
+
+	@Override
+	public Double asDouble() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为Double .");
+	}
+
+	@Override
+	public Date asDate() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为Date .");
+	}
+	
+	@Override
+	public Date asDate(String dateFormat) {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为Date .");
+	}
+
+	@Override
+	public Boolean asBoolean() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为Boolean .");
+	}
+}

+ 77 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/Column.java

@@ -0,0 +1,77 @@
+package cn.tr.plugin.dataX.common.element;
+
+import cn.tr.core.utils.JsonUtils;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Date;
+
+/**
+ * Created by jingxing on 14-8-24.
+ * <p/>
+ */
+public abstract class Column {
+
+	private Type type;
+
+	private Object rawData;
+
+	private int byteSize;
+
+	public Column(final Object object, final Type type, int byteSize) {
+		this.rawData = object;
+		this.type = type;
+		this.byteSize = byteSize;
+	}
+
+	public Object getRawData() {
+		return this.rawData;
+	}
+
+	public Type getType() {
+		return this.type;
+	}
+
+	public int getByteSize() {
+		return this.byteSize;
+	}
+
+	protected void setType(Type type) {
+		this.type = type;
+	}
+
+	protected void setRawData(Object rawData) {
+		this.rawData = rawData;
+	}
+
+	protected void setByteSize(int byteSize) {
+		this.byteSize = byteSize;
+	}
+
+	public abstract Long asLong();
+
+	public abstract Double asDouble();
+
+	public abstract String asString();
+
+	public abstract Date asDate();
+	
+	public abstract Date asDate(String dateFormat);
+
+	public abstract byte[] asBytes();
+
+	public abstract Boolean asBoolean();
+
+	public abstract BigDecimal asBigDecimal();
+
+	public abstract BigInteger asBigInteger();
+
+	@Override
+	public String toString() {
+		return JsonUtils.toJsonString(this);
+	}
+
+	public enum Type {
+		BAD, NULL, INT, LONG, DOUBLE, STRING, BOOL, DATE, BYTES
+	}
+}
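
A minimal usage sketch for the Column contract added above (illustrative only; it assumes the concrete column classes from this commit are on the classpath):

import cn.tr.plugin.dataX.common.element.Column;
import cn.tr.plugin.dataX.common.element.LongColumn;
import cn.tr.plugin.dataX.common.element.StringColumn;

public class ColumnDemo {
    public static void main(String[] args) {
        // Every concrete column carries a raw value plus a declared Type,
        // and exposes conversions through the as*() methods.
        Column id = new LongColumn("42");
        Column flag = new StringColumn("true");

        System.out.println(id.getType());      // LONG
        System.out.println(id.asDouble());     // 42.0
        System.out.println(flag.asBoolean());  // true ("true"/"false" only)
    }
}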

+ 215 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/ColumnCast.java

@@ -0,0 +1,215 @@
+package cn.tr.plugin.dataX.common.element;
+
+import cn.hutool.core.date.DateUtil;
+import cn.hutool.core.date.format.FastDateFormat;
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.tr.plugin.dataX.common.util.Configuration;
+
+import java.io.UnsupportedEncodingException;
+import java.text.ParseException;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+import java.util.TimeZone;
+
+public final class ColumnCast {
+
+	public static void bind(final Configuration configuration) {
+		StringCast.init(configuration);
+		DateCast.init(configuration);
+		BytesCast.init(configuration);
+	}
+
+	public static Date string2Date(final StringColumn column)
+			throws ParseException {
+		return StringCast.asDate(column);
+	}
+	
+	public static Date string2Date(final StringColumn column, String dateFormat)
+			throws ParseException {
+		return StringCast.asDate(column, dateFormat);
+	}
+
+	public static byte[] string2Bytes(final StringColumn column)
+			throws UnsupportedEncodingException {
+		return StringCast.asBytes(column);
+	}
+
+	public static String date2String(final DateColumn column) {
+		return DateCast.asString(column);
+	}
+
+	public static String bytes2String(final BytesColumn column)
+			throws UnsupportedEncodingException {
+		return BytesCast.asString(column);
+	}
+}
+
+class StringCast {
+	static String datetimeFormat = "yyyy-MM-dd HH:mm:ss";
+
+	static String dateFormat = "yyyy-MM-dd";
+
+	static String timeFormat = "HH:mm:ss";
+
+	static List<String> extraFormats = Collections.emptyList();
+
+	static String timeZone = "GMT+8";
+
+	static FastDateFormat dateFormatter;
+
+	static FastDateFormat timeFormatter;
+
+	static FastDateFormat datetimeFormatter;
+
+	static TimeZone timeZoner;
+
+	static String encoding = "UTF-8";
+
+	static void init(final Configuration configuration) {
+		StringCast.datetimeFormat = configuration.getString(
+				"common.column.datetimeFormat", StringCast.datetimeFormat);
+		StringCast.dateFormat = configuration.getString(
+				"common.column.dateFormat", StringCast.dateFormat);
+		StringCast.timeFormat = configuration.getString(
+				"common.column.timeFormat", StringCast.timeFormat);
+		StringCast.extraFormats = configuration.getList(
+				"common.column.extraFormats", Collections.<String>emptyList(), String.class);
+
+		StringCast.timeZone = configuration.getString("common.column.timeZone",
+				StringCast.timeZone);
+		StringCast.timeZoner = TimeZone.getTimeZone(StringCast.timeZone);
+
+		StringCast.datetimeFormatter = FastDateFormat.getInstance(
+				StringCast.datetimeFormat, StringCast.timeZoner);
+		StringCast.dateFormatter = FastDateFormat.getInstance(
+				StringCast.dateFormat, StringCast.timeZoner);
+		StringCast.timeFormatter = FastDateFormat.getInstance(
+				StringCast.timeFormat, StringCast.timeZoner);
+
+		StringCast.encoding = configuration.getString("common.column.encoding",
+				StringCast.encoding);
+	}
+
+	static Date asDate(final StringColumn column) throws ParseException {
+		if (null == column.asString()) {
+			return null;
+		}
+
+		try {
+			return StringCast.datetimeFormatter.parse(column.asString());
+		} catch (ParseException ignored) {
+		}
+
+		try {
+			return StringCast.dateFormatter.parse(column.asString());
+		} catch (ParseException ignored) {
+		}
+
+		ParseException e;
+		try {
+			return StringCast.timeFormatter.parse(column.asString());
+		} catch (ParseException ignored) {
+			e = ignored;
+		}
+
+		for (String format : StringCast.extraFormats) {
+			try{
+				return FastDateFormat.getInstance(format, StringCast.timeZoner).parse(column.asString());
+			} catch (ParseException ignored){
+				e = ignored;
+			}
+		}
+		throw e;
+	}
+	
+	static Date asDate(final StringColumn column, String dateFormat) throws ParseException {
+		ParseException e;
+		try {
+			return FastDateFormat.getInstance(dateFormat, StringCast.timeZoner).parse(column.asString());
+		} catch (ParseException ignored) {
+			e = ignored;
+		}
+		throw e;
+	}
+
+	static byte[] asBytes(final StringColumn column)
+			throws UnsupportedEncodingException {
+		if (null == column.asString()) {
+			return null;
+		}
+
+		return column.asString().getBytes(StringCast.encoding);
+	}
+}
+
+/**
+ * For maintainability, consider switching directly to Apache's DateFormatUtils later.
+ * 
+ * 迟南 has already fixed this issue, but for maintainability we still use the Apache built-in function directly.
+ */
+class DateCast {
+
+	static String datetimeFormat = "yyyy-MM-dd HH:mm:ss";
+
+	static String dateFormat = "yyyy-MM-dd";
+
+	static String timeFormat = "HH:mm:ss";
+
+	static String timeZone = "GMT+8";
+
+	static TimeZone timeZoner = TimeZone.getTimeZone(DateCast.timeZone);
+
+	static void init(final Configuration configuration) {
+		DateCast.datetimeFormat = configuration.getString(
+				"common.column.datetimeFormat", datetimeFormat);
+		DateCast.timeFormat = configuration.getString(
+				"common.column.timeFormat", timeFormat);
+		DateCast.dateFormat = configuration.getString(
+				"common.column.dateFormat", dateFormat);
+		DateCast.timeZone = configuration.getString("common.column.timeZone",
+				DateCast.timeZone);
+		DateCast.timeZoner = TimeZone.getTimeZone(DateCast.timeZone);
+		return;
+	}
+
+	static String asString(final DateColumn column) {
+		if (null == column.asDate()) {
+			return null;
+		}
+
+		switch (column.getSubType()) {
+		case DATE:
+			return DateUtil.format(column.asDate(), DateCast.dateFormat);
+		case TIME:
+			return DateUtil.format(column.asDate(), DateCast.timeFormat);
+		case DATETIME:
+			return DateUtil.format(column.asDate(),
+					DateCast.datetimeFormat);
+		default:
+			throw DataXException
+					.asDataXException(TRExcCode.CONVERT_NOT_SUPPORT,
+							"时间类型出现不支持类型,目前仅支持DATE/TIME/DATETIME。该类型属于编程错误,请反馈给DataX开发团队 .");
+		}
+	}
+}
+
+class BytesCast {
+	static String encoding = "utf-8";
+
+	static void init(final Configuration configuration) {
+		BytesCast.encoding = configuration.getString("common.column.encoding",
+				BytesCast.encoding);
+		return;
+	}
+
+	static String asString(final BytesColumn column)
+			throws UnsupportedEncodingException {
+		if (null == column.asBytes()) {
+			return null;
+		}
+
+		return new String(column.asBytes(), encoding);
+	}
+}
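
ColumnCast only works after bind() has been called with the job Configuration. The sketch below assumes a Configuration.from(String) factory like the one in upstream DataX; that factory is not part of this commit, so treat it as illustrative:

import cn.tr.plugin.dataX.common.element.ColumnCast;
import cn.tr.plugin.dataX.common.element.StringColumn;
import cn.tr.plugin.dataX.common.util.Configuration;

import java.util.Date;

public class ColumnCastBootstrapDemo {
    public static void main(String[] args) throws Exception {
        // Configuration.from(String) is an assumption borrowed from upstream DataX.
        Configuration conf = Configuration.from(
                "{\"common\":{\"column\":{\"timeZone\":\"GMT+8\"}}}");

        // bind() wires StringCast/DateCast/BytesCast to the job configuration;
        // without it the string-to-date formatters are never created.
        ColumnCast.bind(conf);

        Date d = ColumnCast.string2Date(new StringColumn("2024-01-01 08:00:00"));
        System.out.println(d);
    }
}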

+ 136 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/DateColumn.java

@@ -0,0 +1,136 @@
+package cn.tr.plugin.dataX.common.element;
+
+
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Date;
+
+/**
+ * Created by jingxing on 14-8-24.
+ */
+public class DateColumn extends Column {
+
+	private DateType subType = DateType.DATETIME;
+
+	public static enum DateType {
+		DATE, TIME, DATETIME
+	}
+
+	/**
+	 * Builds a DateColumn whose value is null, with Date subtype DATETIME
+	 * */
+	public DateColumn() {
+		this((Long)null);
+	}
+
+	/**
+	 * Builds a DateColumn from stamp (a Unix timestamp), with Date subtype DATETIME.
+	 * Internally the date is stored as a long of milliseconds to save space.
+	 * */
+	public DateColumn(final Long stamp) {
+		super(stamp, Column.Type.DATE, (null == stamp ? 0 : 8));
+	}
+
+	/**
+	 * Builds a DateColumn from a java.util.Date value, with Date subtype DATETIME
+	 * */
+	public DateColumn(final Date date) {
+		this(date == null ? null : date.getTime());
+	}
+
+	/**
+	 * Builds a DateColumn from a java.sql.Date value, with Date subtype DATE (date only, no time)
+	 * */
+	public DateColumn(final java.sql.Date date) {
+		this(date == null ? null : date.getTime());
+		this.setSubType(DateType.DATE);
+	}
+
+	/**
+	 * Builds a DateColumn from a java.sql.Time value, with Date subtype TIME (time only, no date)
+	 * */
+	public DateColumn(final java.sql.Time time) {
+		this(time == null ? null : time.getTime());
+		this.setSubType(DateType.TIME);
+	}
+
+	/**
+	 * Builds a DateColumn from a java.sql.Timestamp value, with Date subtype DATETIME
+	 * */
+	public DateColumn(final java.sql.Timestamp ts) {
+		this(ts == null ? null : ts.getTime());
+		this.setSubType(DateType.DATETIME);
+	}
+
+	@Override
+	public Long asLong() {
+
+		return (Long)this.getRawData();
+	}
+
+	@Override
+	public String asString() {
+		try {
+			return ColumnCast.date2String(this);
+		} catch (Exception e) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_NOT_SUPPORT,
+					String.format("Date[%s]类型不能转为String .", this.toString()));
+		}
+	}
+
+	@Override
+	public Date asDate() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		return new Date((Long)this.getRawData());
+	}
+	
+	@Override
+	public Date asDate(String dateFormat) {
+		return asDate();
+	}
+
+	@Override
+	public byte[] asBytes() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Date类型不能转为Bytes .");
+	}
+
+	@Override
+	public Boolean asBoolean() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Date类型不能转为Boolean .");
+	}
+
+	@Override
+	public Double asDouble() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Date类型不能转为Double .");
+	}
+
+	@Override
+	public BigInteger asBigInteger() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Date类型不能转为BigInteger .");
+	}
+
+	@Override
+	public BigDecimal asBigDecimal() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Date类型不能转为BigDecimal .");
+	}
+
+	public DateType getSubType() {
+		return subType;
+	}
+
+	public void setSubType(DateType subType) {
+		this.subType = subType;
+	}
+}
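
A short sketch of how the java.sql constructors drive the subtype and the formatted output (values are illustrative; DateCast falls back to its built-in patterns when no configuration has been bound):

import cn.tr.plugin.dataX.common.element.DateColumn;

public class DateColumnDemo {
    public static void main(String[] args) {
        // java.sql.Timestamp keeps the default DATETIME subtype...
        DateColumn ts = new DateColumn(new java.sql.Timestamp(System.currentTimeMillis()));
        // ...while java.sql.Date switches the subtype to DATE (date only).
        DateColumn day = new DateColumn(java.sql.Date.valueOf("2024-01-01"));

        System.out.println(ts.getSubType());  // DATETIME
        System.out.println(day.asString());   // formatted with the DATE pattern, e.g. 2024-01-01
    }
}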

+ 167 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/DoubleColumn.java

@@ -0,0 +1,167 @@
+package cn.tr.plugin.dataX.common.element;
+
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Date;
+
+public class DoubleColumn extends Column {
+
+	public DoubleColumn(final String data) {
+		this(data, null == data ? 0 : data.length());
+		this.validate(data);
+	}
+
+	public DoubleColumn(Long data) {
+		this(data == null ? (String) null : String.valueOf(data));
+	}
+
+	public DoubleColumn(Integer data) {
+		this(data == null ? (String) null : String.valueOf(data));
+	}
+
+	/**
+	 * A Double cannot represent decimal values exactly. Storing Double data through this constructor is not recommended; pass a String to the constructor instead.
+	 * 
+	 * */
+	public DoubleColumn(final Double data) {
+		this(data == null ? (String) null
+				: new BigDecimal(String.valueOf(data)).toPlainString());
+	}
+
+	/**
+	 * A Float cannot represent decimal values exactly. Storing Float data through this constructor is not recommended; pass a String to the constructor instead.
+	 * 
+	 * */
+	public DoubleColumn(final Float data) {
+		this(data == null ? (String) null
+				: new BigDecimal(String.valueOf(data)).toPlainString());
+	}
+
+	public DoubleColumn(final BigDecimal data) {
+		this(null == data ? (String) null : data.toPlainString());
+	}
+
+	public DoubleColumn(final BigInteger data) {
+		this(null == data ? (String) null : data.toString());
+	}
+
+	public DoubleColumn() {
+		this((String) null);
+	}
+
+	private DoubleColumn(final String data, int byteSize) {
+		super(data, Column.Type.DOUBLE, byteSize);
+	}
+
+	@Override
+	public BigDecimal asBigDecimal() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		try {
+			return new BigDecimal((String) this.getRawData());
+		} catch (NumberFormatException e) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_NOT_SUPPORT,
+					String.format("String[%s] 无法转换为Double类型 .",
+							(String) this.getRawData()));
+		}
+	}
+
+	@Override
+	public Double asDouble() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		String string = (String) this.getRawData();
+
+		boolean isDoubleSpecific = string.equals("NaN")
+				|| string.equals("-Infinity") || string.equals("+Infinity");
+		if (isDoubleSpecific) {
+			return Double.valueOf(string);
+		}
+
+		BigDecimal result = this.asBigDecimal();
+		OverFlowUtil.validateDoubleNotOverFlow(result);
+
+		return result.doubleValue();
+	}
+
+	@Override
+	public Long asLong() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		BigDecimal result = this.asBigDecimal();
+		OverFlowUtil.validateLongNotOverFlow(result.toBigInteger());
+
+		return result.longValue();
+	}
+
+	@Override
+	public BigInteger asBigInteger() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		return this.asBigDecimal().toBigInteger();
+	}
+
+	@Override
+	public String asString() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+		return (String) this.getRawData();
+	}
+
+	@Override
+	public Boolean asBoolean() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Double类型无法转为Bool .");
+	}
+
+	@Override
+	public Date asDate() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Double类型无法转为Date类型 .");
+	}
+	
+	@Override
+	public Date asDate(String dateFormat) {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Double类型无法转为Date类型 .");
+	}
+
+	@Override
+	public byte[] asBytes() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Double类型无法转为Bytes类型 .");
+	}
+
+	private void validate(final String data) {
+		if (null == data) {
+			return;
+		}
+
+		if (data.equalsIgnoreCase("NaN") || data.equalsIgnoreCase("-Infinity")
+				|| data.equalsIgnoreCase("Infinity")) {
+			return;
+		}
+
+		try {
+			new BigDecimal(data);
+		} catch (Exception e) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_NOT_SUPPORT,
+					String.format("String[%s]无法转为Double类型 .", data));
+		}
+	}
+
+}
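
A small sketch of the String-backed behaviour described in the constructor comments above (illustrative values only):

import cn.tr.plugin.dataX.common.element.DoubleColumn;

public class DoubleColumnDemo {
    public static void main(String[] args) {
        // The raw value is kept as a String, so decimals survive exactly.
        DoubleColumn d = new DoubleColumn("3.7");
        System.out.println(d.asBigDecimal()); // 3.7
        System.out.println(d.asLong());       // 3 (truncated via BigDecimal, not rounded)

        // The special IEEE values pass straight through asDouble().
        System.out.println(new DoubleColumn("NaN").asDouble()); // NaN
    }
}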

+ 140 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/LongColumn.java

@@ -0,0 +1,140 @@
+package cn.tr.plugin.dataX.common.element;
+
+import cn.hutool.core.util.NumberUtil;
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Date;
+
+public class LongColumn extends Column {
+
+	/**
+	 * Builds a LongColumn from the string representation of an integer; Java scientific notation is supported.
+	 * 
+	 * NOTE: <br>
+	 * If data is the string representation of a floating-point number, precision will be lost; use DoubleColumn for floating-point strings.
+	 * 
+	 * */
+	public LongColumn(final String data) {
+		super(null, Column.Type.LONG, 0);
+		if (null == data) {
+			return;
+		}
+
+		try {
+
+			BigInteger rawData = NumberUtil.toBigInteger(data);
+			super.setRawData(rawData);
+
+			// When rawData is in [0, 127], rawData.bitLength() < 8 would make byteSize 0; for simplicity we just treat the length as data.length()
+			// super.setByteSize(rawData.bitLength() / 8);
+			super.setByteSize(data.length());
+		} catch (Exception e) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_NOT_SUPPORT,
+					String.format("String[%s]不能转为Long .", data));
+		}
+	}
+
+	public LongColumn(Long data) {
+		this(null == data ? (BigInteger) null : BigInteger.valueOf(data));
+	}
+
+	public LongColumn(Integer data) {
+		this(null == data ? (BigInteger) null : BigInteger.valueOf(data));
+	}
+
+	public LongColumn(BigInteger data) {
+		this(data, null == data ? 0 : 8);
+	}
+
+	private LongColumn(BigInteger data, int byteSize) {
+		super(data, Column.Type.LONG, byteSize);
+	}
+
+	public LongColumn() {
+		this((BigInteger) null);
+	}
+
+	@Override
+	public BigInteger asBigInteger() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		return (BigInteger) this.getRawData();
+	}
+
+	@Override
+	public Long asLong() {
+		BigInteger rawData = (BigInteger) this.getRawData();
+		if (null == rawData) {
+			return null;
+		}
+
+		OverFlowUtil.validateLongNotOverFlow(rawData);
+
+		return rawData.longValue();
+	}
+
+	@Override
+	public Double asDouble() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		BigDecimal decimal = this.asBigDecimal();
+		OverFlowUtil.validateDoubleNotOverFlow(decimal);
+
+		return decimal.doubleValue();
+	}
+
+	@Override
+	public Boolean asBoolean() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		return this.asBigInteger().compareTo(BigInteger.ZERO) != 0;
+	}
+
+	@Override
+	public BigDecimal asBigDecimal() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		return new BigDecimal(this.asBigInteger());
+	}
+
+	@Override
+	public String asString() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+		return ((BigInteger) this.getRawData()).toString();
+	}
+
+	@Override
+	public Date asDate() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+		return new Date(this.asLong());
+	}
+	
+	@Override
+	public Date asDate(String dateFormat) {
+		return this.asDate();
+	}
+
+	@Override
+	public byte[] asBytes() {
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT, "Long类型不能转为Bytes .");
+	}
+
+}
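
A sketch of the overflow guard that asLong() applies (illustrative values only; the thrown type is the DataXException defined later in this commit, a RuntimeException):

import cn.tr.plugin.dataX.common.element.LongColumn;

import java.math.BigInteger;

public class LongColumnDemo {
    public static void main(String[] args) {
        LongColumn fits = new LongColumn("9223372036854775807");   // Long.MAX_VALUE
        LongColumn tooBig = new LongColumn("9223372036854775808"); // MAX_VALUE + 1

        System.out.println(fits.asLong());       // 9223372036854775807
        BigInteger big = tooBig.asBigInteger();  // still fine as a BigInteger
        System.out.println(big);

        // asLong() checks the range first and throws with code CONVERT_OVER_FLOW.
        try {
            tooBig.asLong();
        } catch (RuntimeException e) {
            System.out.println("overflow detected: " + e.getMessage());
        }
    }
}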

+ 62 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/OverFlowUtil.java

@@ -0,0 +1,62 @@
+package cn.tr.plugin.dataX.common.element;
+
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+
+public final class OverFlowUtil {
+	public static final BigInteger MAX_LONG = BigInteger
+			.valueOf(Long.MAX_VALUE);
+
+	public static final BigInteger MIN_LONG = BigInteger
+			.valueOf(Long.MIN_VALUE);
+
+	public static final BigDecimal MIN_DOUBLE_POSITIVE = new BigDecimal(
+			String.valueOf(Double.MIN_VALUE));
+
+	public static final BigDecimal MAX_DOUBLE_POSITIVE = new BigDecimal(
+			String.valueOf(Double.MAX_VALUE));
+
+	public static boolean isLongOverflow(final BigInteger integer) {
+		return (integer.compareTo(OverFlowUtil.MAX_LONG) > 0 || integer
+				.compareTo(OverFlowUtil.MIN_LONG) < 0);
+
+	}
+
+	public static void validateLongNotOverFlow(final BigInteger integer) {
+		boolean isOverFlow = OverFlowUtil.isLongOverflow(integer);
+
+		if (isOverFlow) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_OVER_FLOW,
+					String.format("[%s] 转为Long类型出现溢出 .", integer.toString()));
+		}
+	}
+
+	public static boolean isDoubleOverFlow(final BigDecimal decimal) {
+		if (decimal.signum() == 0) {
+			return false;
+		}
+
+		BigDecimal newDecimal = decimal;
+		boolean isPositive = decimal.signum() == 1;
+		if (!isPositive) {
+			newDecimal = decimal.negate();
+		}
+
+		return (newDecimal.compareTo(MIN_DOUBLE_POSITIVE) < 0 || newDecimal
+				.compareTo(MAX_DOUBLE_POSITIVE) > 0);
+	}
+
+	public static void validateDoubleNotOverFlow(final BigDecimal decimal) {
+		boolean isOverFlow = OverFlowUtil.isDoubleOverFlow(decimal);
+		if (isOverFlow) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_OVER_FLOW,
+					String.format("[%s]转为Double类型出现溢出 .",
+							decimal.toPlainString()));
+		}
+	}
+}
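
The same checks can also be called directly; a tiny sketch with illustrative values:

import cn.tr.plugin.dataX.common.element.OverFlowUtil;

import java.math.BigDecimal;
import java.math.BigInteger;

public class OverFlowUtilDemo {
    public static void main(String[] args) {
        // Inside the long range: passes silently.
        OverFlowUtil.validateLongNotOverFlow(BigInteger.valueOf(Long.MAX_VALUE));

        // Magnitude beyond Double.MAX_VALUE (about 1.8e308) is reported as overflow.
        System.out.println(OverFlowUtil.isDoubleOverFlow(new BigDecimal("1e309"))); // true
    }
}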

+ 29 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/Record.java

@@ -0,0 +1,29 @@
+package cn.tr.plugin.dataX.common.element;
+
+import java.util.Map;
+
+/**
+ * Created by jingxing on 14-8-24.
+ */
+
+public interface Record {
+
+	public void addColumn(Column column);
+
+	public void setColumn(int i, final Column column);
+
+	public Column getColumn(int i);
+
+	public String toString();
+
+	public int getColumnNumber();
+
+	public int getByteSize();
+
+	public int getMemorySize();
+
+	public void setMeta(Map<String, String> meta);
+
+	public Map<String, String> getMeta();
+
+}

+ 173 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/element/StringColumn.java

@@ -0,0 +1,173 @@
+package cn.tr.plugin.dataX.common.element;
+
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Date;
+
+/**
+ * Created by jingxing on 14-8-24.
+ */
+
+public class StringColumn extends Column {
+
+	public StringColumn() {
+		this((String) null);
+	}
+
+	public StringColumn(final String rawData) {
+		super(rawData, Column.Type.STRING, (null == rawData ? 0 : rawData
+				.length()));
+	}
+
+	@Override
+	public String asString() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		return (String) this.getRawData();
+	}
+
+	private void validateDoubleSpecific(final String data) {
+		if ("NaN".equals(data) || "Infinity".equals(data)
+				|| "-Infinity".equals(data)) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_NOT_SUPPORT,
+					String.format("String[\"%s\"]属于Double特殊类型,不能转为其他类型 .", data));
+		}
+
+		return;
+	}
+
+	@Override
+	public BigInteger asBigInteger() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		this.validateDoubleSpecific((String) this.getRawData());
+
+		try {
+			return this.asBigDecimal().toBigInteger();
+		} catch (Exception e) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_NOT_SUPPORT, String.format(
+							"String[\"%s\"]不能转为BigInteger .", this.asString()));
+		}
+	}
+
+	@Override
+	public Long asLong() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		this.validateDoubleSpecific((String) this.getRawData());
+
+		try {
+			BigInteger integer = this.asBigInteger();
+			OverFlowUtil.validateLongNotOverFlow(integer);
+			return integer.longValue();
+		} catch (Exception e) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_NOT_SUPPORT,
+					String.format("String[\"%s\"]不能转为Long .", this.asString()));
+		}
+	}
+
+	@Override
+	public BigDecimal asBigDecimal() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		this.validateDoubleSpecific((String) this.getRawData());
+
+		try {
+			return new BigDecimal(this.asString());
+		} catch (Exception e) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_NOT_SUPPORT, String.format(
+							"String [\"%s\"] 不能转为BigDecimal .", this.asString()));
+		}
+	}
+
+	@Override
+	public Double asDouble() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		String data = (String) this.getRawData();
+		if ("NaN".equals(data)) {
+			return Double.NaN;
+		}
+
+		if ("Infinity".equals(data)) {
+			return Double.POSITIVE_INFINITY;
+		}
+
+		if ("-Infinity".equals(data)) {
+			return Double.NEGATIVE_INFINITY;
+		}
+
+		BigDecimal decimal = this.asBigDecimal();
+		OverFlowUtil.validateDoubleNotOverFlow(decimal);
+
+		return decimal.doubleValue();
+	}
+
+	@Override
+	public Boolean asBoolean() {
+		if (null == this.getRawData()) {
+			return null;
+		}
+
+		if ("true".equalsIgnoreCase(this.asString())) {
+			return true;
+		}
+
+		if ("false".equalsIgnoreCase(this.asString())) {
+			return false;
+		}
+
+		throw DataXException.asDataXException(
+				TRExcCode.CONVERT_NOT_SUPPORT,
+				String.format("String[\"%s\"]不能转为Bool .", this.asString()));
+	}
+
+	@Override
+	public Date asDate() {
+		try {
+			return ColumnCast.string2Date(this);
+		} catch (Exception e) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_NOT_SUPPORT,
+					String.format("String[\"%s\"]不能转为Date .", this.asString()));
+		}
+	}
+	
+	@Override
+	public Date asDate(String dateFormat) {
+		try {
+			return ColumnCast.string2Date(this, dateFormat);
+		} catch (Exception e) {
+			throw DataXException.asDataXException(TRExcCode.CONVERT_NOT_SUPPORT,
+					String.format("String[\"%s\"]不能转为Date .", this.asString()));
+		}
+	}
+
+	@Override
+	public byte[] asBytes() {
+		try {
+			return ColumnCast.string2Bytes(this);
+		} catch (Exception e) {
+			throw DataXException.asDataXException(
+					TRExcCode.CONVERT_NOT_SUPPORT,
+					String.format("String[\"%s\"]不能转为Bytes .", this.asString()));
+		}
+	}
+}
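
A sketch of the numeric conversions; note that date and byte conversions go through StringCast, whose formatters are only created by ColumnCast.bind(configuration), so call that first (illustrative values only):

import cn.tr.plugin.dataX.common.element.StringColumn;

public class StringColumnDemo {
    public static void main(String[] args) {
        StringColumn s = new StringColumn("123");
        System.out.println(s.asLong());       // 123
        System.out.println(s.asBigDecimal()); // 123

        // "NaN"/"Infinity"/"-Infinity" are rejected for the non-Double targets,
        // and asDate()/asBytes() fail until ColumnCast.bind(...) has been called.
    }
}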

+ 72 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/exception/DataXException.java

@@ -0,0 +1,72 @@
+package cn.tr.plugin.dataX.common.exception;
+
+
+
+import cn.tr.core.exception.BaseCode;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+public class DataXException extends RuntimeException {
+
+    private static final long serialVersionUID = 1L;
+
+    private BaseCode code;
+
+    public DataXException(BaseCode code, String errorMessage) {
+        super(code.toString() + " - " + errorMessage);
+        this.code = code;
+    }
+
+    public DataXException(String errorMessage) {
+        super(errorMessage);
+    }
+
+    private DataXException(BaseCode code, String errorMessage, Throwable cause) {
+        super(code.toString() + " - " + getMessage(errorMessage) + " - " + getMessage(cause), cause);
+
+        this.code = code;
+    }
+
+    public static DataXException asDataXException(BaseCode code, String message) {
+        return new DataXException(code, message);
+    }
+
+    public static DataXException asDataXException(String message) {
+        return new DataXException(message);
+    }
+
+    public static DataXException asDataXException(BaseCode code, String message, Throwable cause) {
+        if (cause instanceof DataXException) {
+            return (DataXException) cause;
+        }
+        return new DataXException(code, message, cause);
+    }
+
+    public static DataXException asDataXException(BaseCode code, Throwable cause) {
+        if (cause instanceof DataXException) {
+            return (DataXException) cause;
+        }
+        return new DataXException(code, getMessage(cause), cause);
+    }
+
+    public BaseCode getCode() {
+        return this.code;
+    }
+
+    private static String getMessage(Object obj) {
+        if (obj == null) {
+            return "";
+        }
+
+        if (obj instanceof Throwable) {
+            StringWriter str = new StringWriter();
+            PrintWriter pw = new PrintWriter(str);
+            ((Throwable) obj).printStackTrace(pw);
+            return str.toString();
+            // return ((Throwable) obj).getMessage();
+        } else {
+            return obj.toString();
+        }
+    }
+}
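
A sketch of wrapping a low-level failure; TRExcCode.CONVERT_NOT_SUPPORT is the same code the column classes above already use:

import cn.tr.core.exception.TRExcCode;
import cn.tr.plugin.dataX.common.exception.DataXException;

import java.math.BigDecimal;

public class DataXExceptionDemo {
    public static void main(String[] args) {
        try {
            new BigDecimal("not-a-number");
        } catch (NumberFormatException e) {
            // The factory appends the cause's stack trace to the message and returns
            // the original DataXException unchanged if the cause already is one.
            DataXException wrapped = DataXException.asDataXException(
                    TRExcCode.CONVERT_NOT_SUPPORT, "bad numeric literal", e);
            System.out.println(wrapped.getCode());
        }
    }
}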

+ 15 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/exception/ExceptionTracker.java

@@ -0,0 +1,15 @@
+package cn.tr.plugin.dataX.common.exception;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+public final class ExceptionTracker {
+    public static final int STRING_BUFFER = 1024;
+
+    public static String trace(Throwable ex) {
+        StringWriter sw = new StringWriter(STRING_BUFFER);
+        PrintWriter pw = new PrintWriter(sw);
+        ex.printStackTrace(pw);
+        return sw.toString();
+    }
+}

+ 25 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/AbstractJobPlugin.java

@@ -0,0 +1,25 @@
+package cn.tr.plugin.dataX.common.plugin;
+
+/**
+ * Created by jingxing on 14-8-24.
+ */
+public abstract class AbstractJobPlugin extends AbstractPlugin {
+	/**
+	 * @return the jobPluginCollector
+	 */
+	public JobPluginCollector getJobPluginCollector() {
+		return jobPluginCollector;
+	}
+
+	/**
+	 * @param jobPluginCollector
+	 *            the jobPluginCollector to set
+	 */
+	public void setJobPluginCollector(
+            JobPluginCollector jobPluginCollector) {
+		this.jobPluginCollector = jobPluginCollector;
+	}
+
+	private JobPluginCollector jobPluginCollector;
+
+}

+ 99 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/AbstractPlugin.java

@@ -0,0 +1,99 @@
+package cn.tr.plugin.dataX.common.plugin;
+
+import cn.tr.plugin.dataX.common.base.BaseObject;
+import cn.tr.plugin.dataX.common.util.Configuration;
+
+import java.util.List;
+
+public abstract class AbstractPlugin extends BaseObject implements Pluginable {
+	//the job's configuration
+    private Configuration pluginJobConf;
+
+    //the plugin's own configuration
+	private Configuration pluginConf;
+
+    // by qiangsi.lq. Changed to the peer side's job configuration
+    private Configuration peerPluginJobConf;
+
+    private String peerPluginName;
+
+    private List<Configuration> readerPluginSplitConf;
+
+    @Override
+	public String getPluginName() {
+		assert null != this.pluginConf;
+		return this.pluginConf.getString("name");
+	}
+
+    @Override
+	public String getDeveloper() {
+		assert null != this.pluginConf;
+		return this.pluginConf.getString("developer");
+	}
+
+    @Override
+	public String getDescription() {
+		assert null != this.pluginConf;
+		return this.pluginConf.getString("description");
+	}
+
+    @Override
+	public Configuration getPluginJobConf() {
+		return pluginJobConf;
+	}
+
+    @Override
+	public void setPluginJobConf(Configuration pluginJobConf) {
+		this.pluginJobConf = pluginJobConf;
+	}
+
+    @Override
+	public void setPluginConf(Configuration pluginConf) {
+		this.pluginConf = pluginConf;
+	}
+
+    @Override
+    public Configuration getPeerPluginJobConf() {
+        return peerPluginJobConf;
+    }
+
+    @Override
+    public void setPeerPluginJobConf(Configuration peerPluginJobConf) {
+        this.peerPluginJobConf = peerPluginJobConf;
+    }
+
+    @Override
+    public String getPeerPluginName() {
+        return peerPluginName;
+    }
+
+    @Override
+    public void setPeerPluginName(String peerPluginName) {
+        this.peerPluginName = peerPluginName;
+    }
+
+    public void preCheck() {
+    }
+
+	public void prepare() {
+	}
+
+	public void post() {
+	}
+
+    public void preHandler(Configuration jobConfiguration){
+
+    }
+
+    public void postHandler(Configuration jobConfiguration){
+
+    }
+
+    public List<Configuration> getReaderPluginSplitConf(){
+        return this.readerPluginSplitConf;
+    }
+
+    public void setReaderPluginSplitConf(List<Configuration> readerPluginSplitConf){
+        this.readerPluginSplitConf = readerPluginSplitConf;
+    }
+}

+ 37 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/AbstractTaskPlugin.java

@@ -0,0 +1,37 @@
+package cn.tr.plugin.dataX.common.plugin;
+
+/**
+ * Created by jingxing on 14-8-24.
+ */
+public abstract class AbstractTaskPlugin extends AbstractPlugin {
+
+    //A TaskPlugin should carry a taskId
+    private int taskGroupId;
+    private int taskId;
+    private TaskPluginCollector taskPluginCollector;
+
+    public TaskPluginCollector getTaskPluginCollector() {
+        return taskPluginCollector;
+    }
+
+    public void setTaskPluginCollector(
+            TaskPluginCollector taskPluginCollector) {
+        this.taskPluginCollector = taskPluginCollector;
+    }
+
+    public int getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(int taskId) {
+        this.taskId = taskId;
+    }
+
+    public int getTaskGroupId() {
+        return taskGroupId;
+    }
+
+    public void setTaskGroupId(int taskGroupId) {
+        this.taskGroupId = taskGroupId;
+    }
+}

+ 22 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/JobPluginCollector.java

@@ -0,0 +1,22 @@
+package cn.tr.plugin.dataX.common.plugin;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Created by jingxing on 14-9-9.
+ */
+public interface JobPluginCollector extends PluginCollector {
+
+	/**
+	 * Gets the custom information collected from the Tasks
+	 * 
+	 * */
+	Map<String, List<String>> getMessage();
+
+	/**
+	 * Gets the custom information collected from the Tasks for the given key
+	 * 
+	 * */
+	List<String> getMessage(String key);
+}

+ 9 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/PluginCollector.java

@@ -0,0 +1,9 @@
+package cn.tr.plugin.dataX.common.plugin;
+
+
+/**
+ * This is just a marker interface
+ * */
+public interface PluginCollector {
+
+}

+ 30 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/Pluginable.java

@@ -0,0 +1,30 @@
+package cn.tr.plugin.dataX.common.plugin;
+
+import cn.tr.plugin.dataX.common.util.Configuration;
+
+public interface Pluginable {
+	String getDeveloper();
+
+    String getDescription();
+
+    void setPluginConf(Configuration pluginConf);
+
+	void init();
+
+	void destroy();
+
+    String getPluginName();
+
+    Configuration getPluginJobConf();
+
+    Configuration getPeerPluginJobConf();
+
+    public String getPeerPluginName();
+
+    void setPluginJobConf(Configuration jobConf);
+
+    void setPeerPluginJobConf(Configuration peerPluginJobConf);
+
+    public void setPeerPluginName(String peerPluginName);
+
+}

+ 26 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/RecordReceiver.java

@@ -0,0 +1,26 @@
+/**
+ *  (C) 2010-2013 Alibaba Group Holding Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package cn.tr.plugin.dataX.common.plugin;
+
+import cn.tr.plugin.dataX.common.element.Record;
+
+public interface RecordReceiver {
+
+	public Record getFromReader();
+
+	public void shutdown();
+}

+ 32 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/RecordSender.java

@@ -0,0 +1,32 @@
+/**
+ *  (C) 2010-2013 Alibaba Group Holding Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package cn.tr.plugin.dataX.common.plugin;
+
+import cn.tr.plugin.dataX.common.element.Record;
+
+public interface RecordSender {
+
+	public Record createRecord();
+
+	public void sendToWriter(Record record);
+
+	public void flush();
+
+	public void terminate();
+
+	public void shutdown();
+}

+ 57 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/plugin/TaskPluginCollector.java

@@ -0,0 +1,57 @@
+package cn.tr.plugin.dataX.common.plugin;
+
+import cn.tr.plugin.dataX.common.element.Record;
+
+/**
+ * 
+ * This interface is provided to Task plugins for recording dirty data and custom information. <br >
+ * 
+ * 1. Dirty data: TaskPluginCollector offers several adapters for recording dirty data, including local output, centralized reporting, and so on.<br >
+ * 2. Custom information: every task plugin can collect information through TaskPluginCollector while it runs, <br >
+ * and the Job plugin retrieves it via the getMessage() interface during the POST phase
+ */
+public abstract class TaskPluginCollector implements PluginCollector {
+	/**
+	 * Collects a dirty record.
+	 * 
+	 * @param dirtyRecord
+	 *            the dirty record
+	 * @param t
+	 *            the associated exception
+	 * @param errorMessage
+	 *            the error message to report
+	 */
+	public abstract void collectDirtyRecord(final Record dirtyRecord,
+                                            final Throwable t, final String errorMessage);
+
+	/**
+	 * Collects a dirty record.
+	 * 
+	 * @param dirtyRecord
+	 *            the dirty record
+	 * @param errorMessage
+	 *            the error message to report
+	 */
+	public void collectDirtyRecord(final Record dirtyRecord,
+			final String errorMessage) {
+		this.collectDirtyRecord(dirtyRecord, null, errorMessage);
+	}
+
+	/**
+	 * Collects a dirty record.
+	 * 
+	 * @param dirtyRecord
+	 *            the dirty record
+	 * @param t
+	 *            the associated exception
+	 */
+	public void collectDirtyRecord(final Record dirtyRecord, final Throwable t) {
+		this.collectDirtyRecord(dirtyRecord, t, "");
+	}
+
+	/**
+	 * Collects custom information that the Job plugin can later retrieve via getMessage. <br >
+	 * If several entries share the same key, a List is used internally to hold the multiple values for that key.<br >
+	 * */
+	public abstract void collectMessage(final String key, final String value);
+}
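
A toy collector that just prints, to show the contract concrete implementations have to fill; it is not part of this commit, and real adapters report to the framework instead:

import cn.tr.plugin.dataX.common.element.Record;
import cn.tr.plugin.dataX.common.plugin.TaskPluginCollector;

public class ConsoleTaskPluginCollector extends TaskPluginCollector {

    @Override
    public void collectDirtyRecord(Record dirtyRecord, Throwable t, String errorMessage) {
        // Toy adapter: just log the bad row and the reason.
        System.err.println("dirty record: " + dirtyRecord + ", reason: " + errorMessage);
    }

    @Override
    public void collectMessage(String key, String value) {
        System.out.println("message " + key + " = " + value);
    }
}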

+ 27 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/spi/Hook.java

@@ -0,0 +1,27 @@
+package cn.tr.plugin.dataX.common.spi;
+
+import cn.tr.plugin.dataX.common.util.Configuration;
+
+import java.util.Map;
+
+/**
+ * Created by xiafei.qiuxf on 14/12/17.
+ */
+public interface Hook {
+
+    /**
+     * Returns the hook's name.
+     *
+     * @return the hook name
+     */
+    public String getName();
+
+    /**
+     * TODO: documentation
+     *
+     * @param jobConf
+     * @param msg
+     */
+    public void invoke(Configuration jobConf, Map<String, Number> msg);
+
+}
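
A minimal, hypothetical Hook implementation; the metrics map is simply whatever the framework passes in:

import cn.tr.plugin.dataX.common.spi.Hook;
import cn.tr.plugin.dataX.common.util.Configuration;

import java.util.Map;

public class LoggingHook implements Hook {

    @Override
    public String getName() {
        return "logging-hook";
    }

    @Override
    public void invoke(Configuration jobConf, Map<String, Number> msg) {
        // Illustrative only: dump whatever metrics the framework hands over.
        System.out.println(getName() + " invoked with " + msg);
    }
}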

+ 51 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/spi/Reader.java

@@ -0,0 +1,51 @@
+package cn.tr.plugin.dataX.common.spi;
+
+import cn.tr.plugin.dataX.common.base.BaseObject;
+import cn.tr.plugin.dataX.common.plugin.AbstractJobPlugin;
+import cn.tr.plugin.dataX.common.plugin.AbstractTaskPlugin;
+import cn.tr.plugin.dataX.common.plugin.RecordSender;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import java.util.*;
+
+/**
+ * Every Reader plugin implements two inner classes, Job and Task, inside itself.
+ * 
+ * 
+ * */
+public abstract class Reader extends BaseObject {
+
+	/**
+	 * Every Reader plugin must implement the Job inner class.
+	 * 
+	 * */
+	public static abstract class Job extends AbstractJobPlugin {
+
+		/**
+		 * Splits the work into tasks.
+		 * 
+		 * @param adviceNumber
+		 * 
+		 *            Note that adviceNumber is the number of tasks the framework suggests the plugin split into; plugin
+		 *            developers should try to produce at least adviceNumber tasks.<br>
+		 * <br>
+		 *            The suggestion exists to give users the best result. For example, if the framework calculates that the
+		 *            user's data store can sustain 100 concurrent connections and the user wants 100 channels, a plugin that
+		 *            follows this rule and produces at least 100 connection configurations lets DataX start 100 Channels at once, which gives the user the best throughput. <br>
+		 *            Likewise, if a user syncs a single Mysql table and expects a concurrency of 10, the plugin should split that table, for instance by primary-key ranges,
+		 *            and if the final task count reaches at least 10 we can provide the user with the maximum throughput. <br>
+		 * <br>
+		 *            Of course, this is only a suggested value; a Reader plugin may split by its own rules, but we recommend following the framework's suggestion. <br>
+		 * <br>
+		 *            For ODPS writing into OTS, pre-sorting and pre-partitioning constraints may leave splitting by partition information as the only option, with no finer granularity;
+		 *            in such cases the split can only follow the physical layout of the source. <br>
+		 * <br>
+		 * 
+		 * 
+		 * */
+		public abstract List<Configuration> split(int adviceNumber);
+	}
+
+	public static abstract class Task extends AbstractTaskPlugin {
+		public abstract void startRead(RecordSender recordSender);
+	}
+}
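
A skeletal, hypothetical Reader that follows the split contract described above; Configuration#clone() is assumed to exist as in upstream DataX and is not shown in this commit:

import cn.tr.plugin.dataX.common.element.LongColumn;
import cn.tr.plugin.dataX.common.element.Record;
import cn.tr.plugin.dataX.common.element.StringColumn;
import cn.tr.plugin.dataX.common.plugin.RecordSender;
import cn.tr.plugin.dataX.common.spi.Reader;
import cn.tr.plugin.dataX.common.util.Configuration;

import java.util.ArrayList;
import java.util.List;

public class DemoReader extends Reader {

    public static class Job extends Reader.Job {
        @Override
        public void init() { }

        @Override
        public void destroy() { }

        @Override
        public List<Configuration> split(int adviceNumber) {
            // Follow the advice: one task configuration per suggested channel.
            // Configuration#clone() is an assumption borrowed from upstream DataX.
            List<Configuration> configs = new ArrayList<Configuration>();
            for (int i = 0; i < adviceNumber; i++) {
                configs.add(this.getPluginJobConf().clone());
            }
            return configs;
        }
    }

    public static class Task extends Reader.Task {
        @Override
        public void init() { }

        @Override
        public void destroy() { }

        @Override
        public void startRead(RecordSender recordSender) {
            // Emit a single illustrative record.
            Record record = recordSender.createRecord();
            record.addColumn(new LongColumn(1L));
            record.addColumn(new StringColumn("hello"));
            recordSender.sendToWriter(record);
            recordSender.flush();
        }
    }
}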

+ 40 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/spi/Writer.java

@@ -0,0 +1,40 @@
+package cn.tr.plugin.dataX.common.spi;
+
+import cn.tr.plugin.dataX.common.base.BaseObject;
+import cn.tr.plugin.dataX.common.plugin.AbstractJobPlugin;
+import cn.tr.plugin.dataX.common.plugin.AbstractTaskPlugin;
+import cn.tr.plugin.dataX.common.plugin.RecordReceiver;
+import cn.tr.plugin.dataX.common.util.Configuration;
+
+import java.util.List;
+
+/**
+ * Every Writer plugin implements the Writer class and, inside it, the Job and Task inner classes.
+ * 
+ * 
+ * */
+public abstract class Writer extends BaseObject {
+	/**
+	 * Every Writer plugin must implement the Job inner class
+	 */
+	public abstract static class Job extends AbstractJobPlugin {
+		/**
+		 * Splits the work into tasks.<br>
+		 * 
+		 * @param mandatoryNumber
+		 *            To keep the Reader and Writer task counts equal, the Writer plugin must split into exactly the number of tasks produced by the source side; otherwise the framework reports an error!
+		 * 
+		 * */
+		public abstract List<Configuration> split(int mandatoryNumber);
+	}
+
+	/**
+	 * Every Writer plugin must implement the Task inner class
+	 */
+	public abstract static class Task extends AbstractTaskPlugin {
+
+		public abstract void startWrite(RecordReceiver lineReceiver);
+
+		public boolean supportFailOver(){return false;}
+	}
+}
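
A matching, hypothetical Writer skeleton; it assumes, as in upstream DataX, that RecordReceiver#getFromReader() returns null once the channel is drained and that Configuration#clone() exists:

import cn.tr.plugin.dataX.common.element.Record;
import cn.tr.plugin.dataX.common.plugin.RecordReceiver;
import cn.tr.plugin.dataX.common.spi.Writer;
import cn.tr.plugin.dataX.common.util.Configuration;

import java.util.ArrayList;
import java.util.List;

public class DemoWriter extends Writer {

    public static class Job extends Writer.Job {
        @Override
        public void init() { }

        @Override
        public void destroy() { }

        @Override
        public List<Configuration> split(int mandatoryNumber) {
            // Unlike the Reader, the count here is mandatory: return exactly
            // mandatoryNumber configurations or the framework reports an error.
            List<Configuration> configs = new ArrayList<Configuration>();
            for (int i = 0; i < mandatoryNumber; i++) {
                configs.add(this.getPluginJobConf().clone()); // clone() assumed, as above
            }
            return configs;
        }
    }

    public static class Task extends Writer.Task {
        @Override
        public void init() { }

        @Override
        public void destroy() { }

        @Override
        public void startWrite(RecordReceiver lineReceiver) {
            Record record;
            // Assumed: getFromReader() returns null when the channel is drained.
            while ((record = lineReceiver.getFromReader()) != null) {
                try {
                    System.out.println(record); // a real writer would persist the row here
                } catch (Exception e) {
                    // Hand the bad row to the collector instead of failing the whole task.
                    this.getTaskPluginCollector().collectDirtyRecord(record, e);
                }
            }
        }
    }
}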

+ 277 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/statistics/PerfRecord.java

@@ -0,0 +1,277 @@
+package cn.tr.plugin.dataX.common.statistics;
+
+import cn.hutool.core.date.DateUtil;
+import cn.hutool.core.net.NetUtil;
+import cn.hutool.core.util.ObjectUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.InetAddress;
+import java.util.Date;
+
+/**
+ * Created by liqiang on 15/8/23.
+ */
+@SuppressWarnings("NullableProblems")
+public class PerfRecord implements Comparable<PerfRecord> {
+    private static Logger perf = LoggerFactory.getLogger(PerfRecord.class);
+    private static String datetimeFormat = "yyyy-MM-dd HH:mm:ss";
+
+
+    public enum PHASE {
+        /**
+         * Total task running time; the first 10 phases are framework statistics, the later ones are plugin-specific statistics
+         */
+        TASK_TOTAL(0),
+
+        READ_TASK_INIT(1),
+        READ_TASK_PREPARE(2),
+        READ_TASK_DATA(3),
+        READ_TASK_POST(4),
+        READ_TASK_DESTROY(5),
+
+        WRITE_TASK_INIT(6),
+        WRITE_TASK_PREPARE(7),
+        WRITE_TASK_DATA(8),
+        WRITE_TASK_POST(9),
+        WRITE_TASK_DESTROY(10),
+
+        /**
+         * SQL_QUERY: the sql query phase, a plugin-specific statistic for some readers
+         */
+        SQL_QUERY(100),
+        /**
+         * Time until all data has been read from the sql result
+         */
+        RESULT_NEXT_ALL(101),
+
+        /**
+         * only odps block close
+         */
+        ODPS_BLOCK_CLOSE(102),
+
+        WAIT_READ_TIME(103),
+
+        WAIT_WRITE_TIME(104),
+
+        TRANSFORMER_TIME(201);
+
+        private int val;
+
+        PHASE(int val) {
+            this.val = val;
+        }
+
+        public int toInt(){
+            return val;
+        }
+    }
+
+    public enum ACTION{
+        start,
+        end
+    }
+
+    private final int taskGroupId;
+    private final int taskId;
+    private final PHASE phase;
+    private volatile ACTION action;
+    private volatile Date startTime;
+    private volatile long elapsedTimeInNs = -1;
+    private volatile long count = 0;
+    private volatile long size = 0;
+
+    private volatile long startTimeInNs;
+    private volatile boolean isReport = false;
+
+    public PerfRecord(int taskGroupId, int taskId, PHASE phase) {
+        this.taskGroupId = taskGroupId;
+        this.taskId = taskId;
+        this.phase = phase;
+    }
+
+    public static void addPerfRecord(int taskGroupId, int taskId, PHASE phase, long startTime,long elapsedTimeInNs) {
+        if(PerfTrace.getInstance().isEnable()) {
+            PerfRecord perfRecord = new PerfRecord(taskGroupId, taskId, phase);
+            perfRecord.elapsedTimeInNs = elapsedTimeInNs;
+            perfRecord.action = ACTION.end;
+            perfRecord.startTime = new Date(startTime);
+            //register with PerfTrace
+            PerfTrace.getInstance().tracePerfRecord(perfRecord);
+            perf.info(perfRecord.toString());
+        }
+    }
+
+    public void start() {
+        if(PerfTrace.getInstance().isEnable()) {
+            this.startTime = new Date();
+            this.startTimeInNs = System.nanoTime();
+            this.action = ACTION.start;
+            //register with PerfTrace
+            PerfTrace.getInstance().tracePerfRecord(this);
+            perf.info(toString());
+        }
+    }
+
+    public void addCount(long count) {
+        this.count += count;
+    }
+
+    public void addSize(long size) {
+        this.size += size;
+    }
+
+    public void end() {
+        if(PerfTrace.getInstance().isEnable()) {
+            this.elapsedTimeInNs = System.nanoTime() - startTimeInNs;
+            this.action = ACTION.end;
+            PerfTrace.getInstance().tracePerfRecord(this);
+            perf.info(toString());
+        }
+    }
+
+    public void end(long elapsedTimeInNs) {
+        if(PerfTrace.getInstance().isEnable()) {
+            this.elapsedTimeInNs = elapsedTimeInNs;
+            this.action = ACTION.end;
+            PerfTrace.getInstance().tracePerfRecord(this);
+            perf.info(toString());
+        }
+    }
+
+    @Override
+    public String toString() {
+        return String.format("%s,%s,%s,%s,%s,%s,%s,%s,%s,%s"
+                , getInstId(), taskGroupId, taskId, phase, action,
+                DateUtil.format(startTime, datetimeFormat), elapsedTimeInNs, count, size,getHostIP());
+    }
+
+
+    @Override
+    public int compareTo(PerfRecord o) {
+        if (o == null) {
+            return 1;
+        }
+        return Long.compare(this.elapsedTimeInNs, o.elapsedTimeInNs);
+    }
+
+    @Override
+    public int hashCode() {
+        long jobId = getInstId();
+        int result = (int) (jobId ^ (jobId >>> 32));
+        result = 31 * result + taskGroupId;
+        result = 31 * result + taskId;
+        result = 31 * result + phase.toInt();
+        result = 31 * result + (startTime != null ? startTime.hashCode() : 0);
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if(!(o instanceof PerfRecord)){
+            return false;
+        }
+
+        PerfRecord dst = (PerfRecord)o;
+
+        if (this.getInstId() != dst.getInstId()) {
+            return false;
+        }
+        if (this.taskGroupId != dst.taskGroupId) {
+            return false;
+        }
+        if (this.taskId != dst.taskId) {
+            return false;
+        }
+        if (phase != null ? !phase.equals(dst.phase) : dst.phase != null) {
+            return false;
+        }
+        if (startTime != null ? !startTime.equals(dst.startTime) : dst.startTime != null) {
+            return false;
+        }
+        return true;
+    }
+
+    public PerfRecord copy() {
+        PerfRecord copy = new PerfRecord(this.taskGroupId, this.getTaskId(), this.phase);
+        copy.action = this.action;
+        copy.startTime = this.startTime;
+        copy.elapsedTimeInNs = this.elapsedTimeInNs;
+        copy.count = this.count;
+        copy.size = this.size;
+        return copy;
+    }
+    public int getTaskGroupId() {
+        return taskGroupId;
+    }
+
+    public int getTaskId() {
+        return taskId;
+    }
+
+    public PHASE getPhase() {
+        return phase;
+    }
+
+    public ACTION getAction() {
+        return action;
+    }
+
+    public long getElapsedTimeInNs() {
+        return elapsedTimeInNs;
+    }
+
+    public long getCount() {
+        return count;
+    }
+
+    public long getSize() {
+        return size;
+    }
+
+    public long getInstId(){
+        return PerfTrace.getInstance().getInstId();
+    }
+
+    public String getHostIP(){
+        InetAddress localhost = NetUtil.getLocalhost();
+        if(ObjectUtil.isNotNull(localhost)){
+            return localhost.getHostAddress();
+        }
+        return "";
+    }
+
+    public String getHostName(){
+        return NetUtil.getLocalHostName();
+    }
+
+    public Date getStartTime() {
+        return startTime;
+    }
+
+    public long getStartTimeInMs() {
+        return startTime.getTime();
+    }
+
+    public long getStartTimeInNs() {
+        return startTimeInNs;
+    }
+
+    public String getDatetime(){
+        if(startTime == null){
+            return "null time";
+        }
+        return DateUtil.format(startTime, datetimeFormat);
+    }
+
+    public boolean isReport() {
+        return isReport;
+    }
+
+    public void setIsReport(boolean isReport) {
+        this.isReport = isReport;
+    }
+}
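
A sketch of wrapping one phase with a PerfRecord; the ids and counters are illustrative, and start()/end() do nothing unless PerfTrace has been enabled:

import cn.tr.plugin.dataX.common.statistics.PerfRecord;

public class PerfRecordDemo {

    // taskGroupId/taskId and the counters below are illustrative values only.
    public static void readWithTracing(int taskGroupId, int taskId) {
        PerfRecord queryPerf = new PerfRecord(taskGroupId, taskId, PerfRecord.PHASE.SQL_QUERY);
        queryPerf.start();            // no-op while PerfTrace is disabled
        try {
            // ... run the query and consume the result set ...
            queryPerf.addCount(100L); // rows read (illustrative)
            queryPerf.addSize(4096L); // bytes read (illustrative)
        } finally {
            queryPerf.end();          // records elapsed nanoseconds and reports to PerfTrace
        }
    }
}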

+ 906 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/statistics/PerfTrace.java

@@ -0,0 +1,906 @@
+package cn.tr.plugin.dataX.common.statistics;
+
+import cn.hutool.core.net.NetUtil;
+import cn.hutool.core.util.StrUtil;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import cn.tr.plugin.dataX.common.statistics.PerfRecord.PHASE;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * PerfTrace records a job (local mode) or a taskGroup (distribute mode); since each of these runs in its own JVM, a JVM only needs a single PerfTrace.
+ */
+
+public class PerfTrace {
+
+    private static Logger LOG = LoggerFactory.getLogger(PerfTrace.class);
+    private static PerfTrace instance;
+    private static final Object lock = new Object();
+    private String perfTraceId;
+    private volatile boolean enable;
+    private volatile boolean isJob;
+    private long instId;
+    private long jobId;
+    private long jobVersion;
+    private int taskGroupId;
+    private int channelNumber;
+
+    private int priority;
+    private int batchSize = 500;
+    private volatile boolean perfReportEnable = true;
+
+    //jobid_jobversion,instanceid,taskid, src_mark, dst_mark,
+    private Map<Integer, String> taskDetails = new ConcurrentHashMap<Integer, String>();
+    //PHASE => PerfRecord
+    private ConcurrentHashMap<PHASE, SumPerfRecord4Print> perfRecordMaps4print = new ConcurrentHashMap<PHASE, SumPerfRecord4Print>();
+    // job_phase => SumPerf4Report
+    private SumPerf4Report sumPerf4Report = new SumPerf4Report();
+    private SumPerf4Report sumPerf4Report4NotEnd;
+    private Configuration jobInfo;
+    private final Set<PerfRecord> needReportPool4NotEnd = new HashSet<PerfRecord>();
+    private final List<PerfRecord> totalEndReport = new ArrayList<PerfRecord>();
+
+    /**
+     * Singleton accessor.
+     *
+     * @param isJob
+     * @param jobId
+     * @param taskGroupId
+     * @return
+     */
+    public static PerfTrace getInstance(boolean isJob, long jobId, int taskGroupId, int priority, boolean enable) {
+
+        if (instance == null) {
+            synchronized (lock) {
+                if (instance == null) {
+                    instance = new PerfTrace(isJob, jobId, taskGroupId, priority, enable);
+                }
+            }
+        }
+        return instance;
+    }
+
+    /**
+     * Because there is only one per JVM, once getInstance(isJob,jobId,taskGroupId) has created the instance, later calls simply return it for convenience.
+     *
+     * @return
+     */
+    public static PerfTrace getInstance() {
+        if (instance == null) {
+            LOG.error("PerfTrace instance has not been initialized! There must be an error!");
+            synchronized (lock) {
+                if (instance == null) {
+                    instance = new PerfTrace(false, -1111, -1111, 0, false);
+                }
+            }
+        }
+        return instance;
+    }
+
+    private PerfTrace(boolean isJob, long jobId, int taskGroupId, int priority, boolean enable) {
+        try {
+            this.perfTraceId = isJob ? "job_" + jobId : String.format("taskGroup_%s_%s", jobId, taskGroupId);
+            this.enable = enable;
+            this.isJob = isJob;
+            this.taskGroupId = taskGroupId;
+            this.instId = jobId;
+            this.priority = priority;
+            LOG.info(String.format("PerfTrace traceId=%s, isEnable=%s, priority=%s", this.perfTraceId, this.enable, this.priority));
+
+        } catch (Exception e) {
+            // do nothing
+            this.enable = false;
+        }
+    }
+
+    public void addTaskDetails(int taskId, String detail) {
+        if (enable) {
+            String before = "";
+            int index = detail.indexOf("?");
+            String current = detail.substring(0, index == -1 ? detail.length() : index);
+            if (current.indexOf("[") >= 0) {
+                current += "]";
+            }
+            if (taskDetails.containsKey(taskId)) {
+                before = taskDetails.get(taskId).trim();
+            }
+            if (StrUtil.isBlank(before)) {
+                before = "";
+            } else {
+                before += ",";
+            }
+            this.taskDetails.put(taskId, before + current);
+        }
+    }
+
+    public void tracePerfRecord(PerfRecord perfRecord) {
+        try {
+            if (enable) {
+                long curNanoTime = System.nanoTime();
+                //ArrayList is not thread-safe
+                switch (perfRecord.getAction()) {
+                    case end:
+                        synchronized (totalEndReport) {
+                            totalEndReport.add(perfRecord);
+
+                            if (totalEndReport.size() > batchSize * 10) {
+                                sumPerf4EndPrint(totalEndReport);
+                            }
+                        }
+
+                        if (perfReportEnable && needReport(perfRecord)) {
+                            synchronized (needReportPool4NotEnd) {
+                                sumPerf4Report.add(curNanoTime,perfRecord);
+                                needReportPool4NotEnd.remove(perfRecord);
+                            }
+                        }
+
+                        break;
+                    case start:
+                        if (perfReportEnable && needReport(perfRecord)) {
+                            synchronized (needReportPool4NotEnd) {
+                                needReportPool4NotEnd.add(perfRecord);
+                            }
+                        }
+                        break;
+                }
+            }
+        } catch (Exception e) {
+            // do nothing
+        }
+    }
+
+    private boolean needReport(PerfRecord perfRecord) {
+        switch (perfRecord.getPhase()) {
+            case TASK_TOTAL:
+            case SQL_QUERY:
+            case RESULT_NEXT_ALL:
+            case ODPS_BLOCK_CLOSE:
+                return true;
+        }
+        return false;
+    }
+
+    public String summarizeNoException() {
+        String res;
+        try {
+            res = summarize();
+        } catch (Exception e) {
+            res = "PerfTrace summarize has Exception " + e.getMessage();
+        }
+        return res;
+    }
+
+    //At job end, aggregate the overall perf statistics collected so far
+    private synchronized String summarize() {
+        if (!enable) {
+            return "PerfTrace not enable!";
+        }
+
+        if (totalEndReport.size() > 0) {
+            sumPerf4EndPrint(totalEndReport);
+        }
+
+        StringBuilder info = new StringBuilder();
+        info.append("\n === total summarize info === \n");
+        info.append("\n   1. all phase average time info and max time task info: \n\n");
+        info.append(String.format("%-20s | %18s | %18s | %18s | %18s | %-100s\n", "PHASE", "AVERAGE USED TIME", "ALL TASK NUM", "MAX USED TIME", "MAX TASK ID", "MAX TASK INFO"));
+
+        List<PHASE> keys = new ArrayList<PHASE>(perfRecordMaps4print.keySet());
+        Collections.sort(keys, new Comparator<PHASE>() {
+            @Override
+            public int compare(PHASE o1, PHASE o2) {
+                return o1.toInt() - o2.toInt();
+            }
+        });
+        for (PHASE phase : keys) {
+            SumPerfRecord4Print sumPerfRecord = perfRecordMaps4print.get(phase);
+            if (sumPerfRecord == null) {
+                continue;
+            }
+            long averageTime = sumPerfRecord.getAverageTime();
+            long maxTime = sumPerfRecord.getMaxTime();
+            int maxTaskId = sumPerfRecord.maxTaskId;
+            int maxTaskGroupId = sumPerfRecord.getMaxTaskGroupId();
+            info.append(String.format("%-20s | %18s | %18s | %18s | %18s | %-100s\n",
+                    phase, unitTime(averageTime), sumPerfRecord.totalCount, unitTime(maxTime), jobId + "-" + maxTaskGroupId + "-" + maxTaskId, taskDetails.get(maxTaskId)));
+        }
+
+        //SumPerfRecord4Print countSumPerf = Optional.fromNullable(perfRecordMaps4print.get(PHASE.READ_TASK_DATA)).or(new SumPerfRecord4Print());
+
+        SumPerfRecord4Print countSumPerf = perfRecordMaps4print.get(PHASE.READ_TASK_DATA);
+        if(countSumPerf == null){
+            countSumPerf = new SumPerfRecord4Print();
+        }
+
+        long averageRecords = countSumPerf.getAverageRecords();
+        long averageBytes = countSumPerf.getAverageBytes();
+        long maxRecord = countSumPerf.getMaxRecord();
+        long maxByte = countSumPerf.getMaxByte();
+        int maxTaskId4Records = countSumPerf.getMaxTaskId4Records();
+        int maxTGID4Records = countSumPerf.getMaxTGID4Records();
+
+        info.append("\n\n 2. record average count and max count task info :\n\n");
+        info.append(String.format("%-20s | %18s | %18s | %18s | %18s | %18s | %-100s\n", "PHASE", "AVERAGE RECORDS", "AVERAGE BYTES", "MAX RECORDS", "MAX RECORD`S BYTES", "MAX TASK ID", "MAX TASK INFO"));
+        if (maxTaskId4Records > -1) {
+            info.append(String.format("%-20s | %18s | %18s | %18s | %18s | %18s | %-100s\n"
+                    , PHASE.READ_TASK_DATA, averageRecords, unitSize(averageBytes), maxRecord, unitSize(maxByte), jobId + "-" + maxTGID4Records + "-" + maxTaskId4Records, taskDetails.get(maxTaskId4Records)));
+
+        }
+        return info.toString();
+    }
+
+    // By default the supplied time is in nanoseconds
+    public static String unitTime(long time) {
+        return unitTime(time, TimeUnit.NANOSECONDS);
+    }
+
+    public static String unitTime(long time, TimeUnit timeUnit) {
+        return String.format("%,.3fs", ((float) timeUnit.toNanos(time)) / 1000000000);
+    }
+
+    public static String unitSize(long size) {
+        if (size > 1000000000) {
+            return String.format("%,.2fG", (float) size / 1000000000);
+        } else if (size > 1000000) {
+            return String.format("%,.2fM", (float) size / 1000000);
+        } else if (size > 1000) {
+            return String.format("%,.2fK", (float) size / 1000);
+        } else {
+            return size + "B";
+        }
+    }
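+
+    // Illustrative sketch (hypothetical helper, assuming the formatting above): how
+    // unitTime/unitSize render values with their 1000-based units.
+    private static void unitFormatSketch() {
+        System.out.println(unitTime(1_500_000_000L)); // 1.5e9 ns -> "1.500s"
+        System.out.println(unitSize(2_500_000L));     // 2.5e6 B  -> "2.50M"
+        System.out.println(unitSize(512L));           // below 1K -> "512B"
+    }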
+
+
+    public synchronized ConcurrentHashMap<PHASE, SumPerfRecord4Print> getPerfRecordMaps4print() {
+        if (totalEndReport.size() > 0) {
+            sumPerf4EndPrint(totalEndReport);
+        }
+        return perfRecordMaps4print;
+    }
+
+    public SumPerf4Report getSumPerf4Report() {
+        return sumPerf4Report;
+    }
+
+    public Set<PerfRecord> getNeedReportPool4NotEnd() {
+        return needReportPool4NotEnd;
+    }
+
+    public List<PerfRecord> getTotalEndReport() {
+        return totalEndReport;
+    }
+
+    public Map<Integer, String> getTaskDetails() {
+        return taskDetails;
+    }
+
+    public boolean isEnable() {
+        return enable;
+    }
+
+    public boolean isJob() {
+        return isJob;
+    }
+
+    private String cluster;
+    private String jobDomain;
+    private String srcType;
+    private String dstType;
+    private String srcGuid;
+    private String dstGuid;
+    private Date windowStart;
+    private Date windowEnd;
+    private Date jobStartTime;
+
+    public void setJobInfo(Configuration jobInfo, boolean perfReportEnable, int channelNumber) {
+        try {
+            this.jobInfo = jobInfo;
+            if (jobInfo != null && perfReportEnable) {
+
+                cluster = jobInfo.getString("cluster");
+
+                String srcDomain = jobInfo.getString("srcDomain", "null");
+                String dstDomain = jobInfo.getString("dstDomain", "null");
+                jobDomain = srcDomain + "|" + dstDomain;
+                srcType = jobInfo.getString("srcType");
+                dstType = jobInfo.getString("dstType");
+                srcGuid = jobInfo.getString("srcGuid");
+                dstGuid = jobInfo.getString("dstGuid");
+                windowStart = getWindow(jobInfo.getString("windowStart"), true);
+                windowEnd = getWindow(jobInfo.getString("windowEnd"), false);
+                String jobIdStr = jobInfo.getString("jobId");
+                jobId = StrUtil.isEmpty(jobIdStr) ? (long) -5 : Long.parseLong(jobIdStr);
+                String jobVersionStr = jobInfo.getString("jobVersion");
+                jobVersion = StrUtil.isEmpty(jobVersionStr) ? (long) -4 : Long.parseLong(jobVersionStr);
+                jobStartTime = new Date();
+            }
+            this.perfReportEnable = perfReportEnable;
+            this.channelNumber = channelNumber;
+        } catch (Exception e) {
+            this.perfReportEnable = false;
+        }
+    }
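+
+    // Illustrative note (hypothetical values): the jobInfo keys read above correspond to a
+    // JSON fragment such as:
+    //   { "cluster": "c1", "srcDomain": "srcDb", "dstDomain": "dstDb",
+    //     "srcType": "mysql", "dstType": "mysql", "srcGuid": "g1", "dstGuid": "g2",
+    //     "windowStart": "2024-01-01 00:00:00", "windowEnd": "2024-01-01 01:00:00",
+    //     "jobId": "1001", "jobVersion": "1" }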
+
+    private Date getWindow(String windowStr, boolean startWindow) {
+        SimpleDateFormat sdf1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+        SimpleDateFormat sdf2 = new SimpleDateFormat("yyyy-MM-dd 00:00:00");
+        if (StrUtil.isNotEmpty(windowStr)) {
+            try {
+                return sdf1.parse(windowStr);
+            } catch (ParseException e) {
+                // do nothing
+            }
+        }
+
+        if (startWindow) {
+            try {
+                return sdf2.parse(sdf2.format(new Date()));
+            } catch (ParseException e1) {
+                //do nothing
+            }
+        }
+
+        return null;
+    }
+
+    public long getInstId() {
+        return instId;
+    }
+
+    public Configuration getJobInfo() {
+        return jobInfo;
+    }
+
+    public void setBatchSize(int batchSize) {
+        this.batchSize = batchSize;
+    }
+
+    public synchronized JobStatisticsDto2 getReports(String mode) {
+
+        try {
+            if (!enable || !perfReportEnable) {
+                return null;
+            }
+
+            if (("job".equalsIgnoreCase(mode) && !isJob) || "tg".equalsIgnoreCase(mode) && isJob) {
+                return null;
+            }
+
+            // Reset the statistics of not-yet-finished tasks on every report
+            sumPerf4Report4NotEnd = new SumPerf4Report();
+            Set<PerfRecord> needReportPool4NotEndTmp = null;
+            synchronized (needReportPool4NotEnd) {
+                needReportPool4NotEndTmp = new HashSet<PerfRecord>(needReportPool4NotEnd);
+            }
+
+            long curNanoTime = System.nanoTime();
+            for (PerfRecord perfRecord : needReportPool4NotEndTmp) {
+                sumPerf4Report4NotEnd.add(curNanoTime, perfRecord);
+            }
+
+            JobStatisticsDto2 jdo = new JobStatisticsDto2();
+            jdo.setInstId(this.instId);
+            if (isJob) {
+                jdo.setTaskGroupId(-6);
+            } else {
+                jdo.setTaskGroupId(this.taskGroupId);
+            }
+            jdo.setJobId(this.jobId);
+            jdo.setJobVersion(this.jobVersion);
+            jdo.setWindowStart(this.windowStart);
+            jdo.setWindowEnd(this.windowEnd);
+            jdo.setJobStartTime(jobStartTime);
+            jdo.setJobRunTimeMs(System.currentTimeMillis() - jobStartTime.getTime());
+            jdo.setJobPriority(this.priority);
+            jdo.setChannelNum(this.channelNumber);
+            jdo.setCluster(this.cluster);
+            jdo.setJobDomain(this.jobDomain);
+            jdo.setSrcType(this.srcType);
+            jdo.setDstType(this.dstType);
+            jdo.setSrcGuid(this.srcGuid);
+            jdo.setDstGuid(this.dstGuid);
+            jdo.setHostAddress(NetUtil.getLocalhostStr());
+
+            //sum
+            jdo.setTaskTotalTimeMs(sumPerf4Report4NotEnd.totalTaskRunTimeInMs + sumPerf4Report.totalTaskRunTimeInMs);
+            jdo.setOdpsBlockCloseTimeMs(sumPerf4Report4NotEnd.odpsCloseTimeInMs + sumPerf4Report.odpsCloseTimeInMs);
+            jdo.setSqlQueryTimeMs(sumPerf4Report4NotEnd.sqlQueryTimeInMs + sumPerf4Report.sqlQueryTimeInMs);
+            jdo.setResultNextTimeMs(sumPerf4Report4NotEnd.resultNextTimeInMs + sumPerf4Report.resultNextTimeInMs);
+
+            return jdo;
+        } catch (Exception e) {
+            // do nothing
+        }
+
+        return null;
+    }
+
+    private void sumPerf4EndPrint(List<PerfRecord> totalEndReport) {
+        if (!enable || totalEndReport == null) {
+            return;
+        }
+
+        for (PerfRecord perfRecord : totalEndReport) {
+            perfRecordMaps4print.putIfAbsent(perfRecord.getPhase(), new SumPerfRecord4Print());
+            perfRecordMaps4print.get(perfRecord.getPhase()).add(perfRecord);
+        }
+
+        totalEndReport.clear();
+    }
+
+    public void setChannelNumber(int needChannelNumber) {
+        this.channelNumber = needChannelNumber;
+    }
+
+
+    public static class SumPerf4Report {
+        long totalTaskRunTimeInMs = 0L;
+        long odpsCloseTimeInMs = 0L;
+        long sqlQueryTimeInMs = 0L;
+        long resultNextTimeInMs = 0L;
+
+        public void add(long curNanoTime,PerfRecord perfRecord) {
+            try {
+                long runTimeEndInMs;
+                if (perfRecord.getElapsedTimeInNs() == -1) {
+                    runTimeEndInMs = (curNanoTime - perfRecord.getStartTimeInNs()) / 1000000;
+                } else {
+                    runTimeEndInMs = perfRecord.getElapsedTimeInNs() / 1000000;
+                }
+                switch (perfRecord.getPhase()) {
+                    case TASK_TOTAL:
+                        totalTaskRunTimeInMs += runTimeEndInMs;
+                        break;
+                    case SQL_QUERY:
+                        sqlQueryTimeInMs += runTimeEndInMs;
+                        break;
+                    case RESULT_NEXT_ALL:
+                        resultNextTimeInMs += runTimeEndInMs;
+                        break;
+                    case ODPS_BLOCK_CLOSE:
+                        odpsCloseTimeInMs += runTimeEndInMs;
+                        break;
+                }
+            }catch (Exception e){
+                //do nothing
+            }
+        }
+
+        public long getTotalTaskRunTimeInMs() {
+            return totalTaskRunTimeInMs;
+        }
+
+        public long getOdpsCloseTimeInMs() {
+            return odpsCloseTimeInMs;
+        }
+
+        public long getSqlQueryTimeInMs() {
+            return sqlQueryTimeInMs;
+        }
+
+        public long getResultNextTimeInMs() {
+            return resultNextTimeInMs;
+        }
+    }
+
+    public static class SumPerfRecord4Print {
+        private long perfTimeTotal = 0;
+        private long averageTime = 0;
+        private long maxTime = 0;
+        private int maxTaskId = -1;
+        private int maxTaskGroupId = -1;
+        private int totalCount = 0;
+
+        private long recordsTotal = 0;
+        private long sizesTotal = 0;
+        private long averageRecords = 0;
+        private long averageBytes = 0;
+        private long maxRecord = 0;
+        private long maxByte = 0;
+        private int maxTaskId4Records = -1;
+        private int maxTGID4Records = -1;
+
+        public void add(PerfRecord perfRecord) {
+            if (perfRecord == null) {
+                return;
+            }
+            perfTimeTotal += perfRecord.getElapsedTimeInNs();
+            if (perfRecord.getElapsedTimeInNs() >= maxTime) {
+                maxTime = perfRecord.getElapsedTimeInNs();
+                maxTaskId = perfRecord.getTaskId();
+                maxTaskGroupId = perfRecord.getTaskGroupId();
+            }
+
+            recordsTotal += perfRecord.getCount();
+            sizesTotal += perfRecord.getSize();
+            if (perfRecord.getCount() >= maxRecord) {
+                maxRecord = perfRecord.getCount();
+                maxByte = perfRecord.getSize();
+                maxTaskId4Records = perfRecord.getTaskId();
+                maxTGID4Records = perfRecord.getTaskGroupId();
+            }
+
+            totalCount++;
+        }
+
+        public long getPerfTimeTotal() {
+            return perfTimeTotal;
+        }
+
+        public long getAverageTime() {
+            if (totalCount > 0) {
+                averageTime = perfTimeTotal / totalCount;
+            }
+            return averageTime;
+        }
+
+        public long getMaxTime() {
+            return maxTime;
+        }
+
+        public int getMaxTaskId() {
+            return maxTaskId;
+        }
+
+        public int getMaxTaskGroupId() {
+            return maxTaskGroupId;
+        }
+
+        public long getRecordsTotal() {
+            return recordsTotal;
+        }
+
+        public long getSizesTotal() {
+            return sizesTotal;
+        }
+
+        public long getAverageRecords() {
+            if (totalCount > 0) {
+                averageRecords = recordsTotal / totalCount;
+            }
+            return averageRecords;
+        }
+
+        public long getAverageBytes() {
+            if (totalCount > 0) {
+                averageBytes = sizesTotal / totalCount;
+            }
+            return averageBytes;
+        }
+
+        public long getMaxRecord() {
+            return maxRecord;
+        }
+
+        public long getMaxByte() {
+            return maxByte;
+        }
+
+        public int getMaxTaskId4Records() {
+            return maxTaskId4Records;
+        }
+
+        public int getMaxTGID4Records() {
+            return maxTGID4Records;
+        }
+
+        public int getTotalCount() {
+            return totalCount;
+        }
+    }
+    class JobStatisticsDto2 {
+
+        private Long id;
+        private Date gmtCreate;
+        private Date gmtModified;
+        private Long instId;
+        private Long jobId;
+        private Long jobVersion;
+        private Integer taskGroupId;
+        private Date windowStart;
+        private Date windowEnd;
+        private Date jobStartTime;
+        private Date jobEndTime;
+        private Long jobRunTimeMs;
+        private Integer jobPriority;
+        private Integer channelNum;
+        private String cluster;
+        private String jobDomain;
+        private String srcType;
+        private String dstType;
+        private String srcGuid;
+        private String dstGuid;
+        private Long records;
+        private Long bytes;
+        private Long speedRecord;
+        private Long speedByte;
+        private String stagePercent;
+        private Long errorRecord;
+        private Long errorBytes;
+        private Long waitReadTimeMs;
+        private Long waitWriteTimeMs;
+        private Long odpsBlockCloseTimeMs;
+        private Long sqlQueryTimeMs;
+        private Long resultNextTimeMs;
+        private Long taskTotalTimeMs;
+        private String hostAddress;
+
+        public Long getId() {
+            return id;
+        }
+
+        public Date getGmtCreate() {
+            return gmtCreate;
+        }
+
+        public Date getGmtModified() {
+            return gmtModified;
+        }
+
+        public Long getInstId() {
+            return instId;
+        }
+
+        public Long getJobId() {
+            return jobId;
+        }
+
+        public Long getJobVersion() {
+            return jobVersion;
+        }
+
+        public Integer getTaskGroupId() {
+            return taskGroupId;
+        }
+
+        public Date getWindowStart() {
+            return windowStart;
+        }
+
+        public Date getWindowEnd() {
+            return windowEnd;
+        }
+
+        public Date getJobStartTime() {
+            return jobStartTime;
+        }
+
+        public Date getJobEndTime() {
+            return jobEndTime;
+        }
+
+        public Long getJobRunTimeMs() {
+            return jobRunTimeMs;
+        }
+
+        public Integer getJobPriority() {
+            return jobPriority;
+        }
+
+        public Integer getChannelNum() {
+            return channelNum;
+        }
+
+        public String getCluster() {
+            return cluster;
+        }
+
+        public String getJobDomain() {
+            return jobDomain;
+        }
+
+        public String getSrcType() {
+            return srcType;
+        }
+
+        public String getDstType() {
+            return dstType;
+        }
+
+        public String getSrcGuid() {
+            return srcGuid;
+        }
+
+        public String getDstGuid() {
+            return dstGuid;
+        }
+
+        public Long getRecords() {
+            return records;
+        }
+
+        public Long getBytes() {
+            return bytes;
+        }
+
+        public Long getSpeedRecord() {
+            return speedRecord;
+        }
+
+        public Long getSpeedByte() {
+            return speedByte;
+        }
+
+        public String getStagePercent() {
+            return stagePercent;
+        }
+
+        public Long getErrorRecord() {
+            return errorRecord;
+        }
+
+        public Long getErrorBytes() {
+            return errorBytes;
+        }
+
+        public Long getWaitReadTimeMs() {
+            return waitReadTimeMs;
+        }
+
+        public Long getWaitWriteTimeMs() {
+            return waitWriteTimeMs;
+        }
+
+        public Long getOdpsBlockCloseTimeMs() {
+            return odpsBlockCloseTimeMs;
+        }
+
+        public Long getSqlQueryTimeMs() {
+            return sqlQueryTimeMs;
+        }
+
+        public Long getResultNextTimeMs() {
+            return resultNextTimeMs;
+        }
+
+        public Long getTaskTotalTimeMs() {
+            return taskTotalTimeMs;
+        }
+
+        public String getHostAddress() {
+            return hostAddress;
+        }
+
+        public void setId(Long id) {
+            this.id = id;
+        }
+
+        public void setGmtCreate(Date gmtCreate) {
+            this.gmtCreate = gmtCreate;
+        }
+
+        public void setGmtModified(Date gmtModified) {
+            this.gmtModified = gmtModified;
+        }
+
+        public void setInstId(Long instId) {
+            this.instId = instId;
+        }
+
+        public void setJobId(Long jobId) {
+            this.jobId = jobId;
+        }
+
+        public void setJobVersion(Long jobVersion) {
+            this.jobVersion = jobVersion;
+        }
+
+        public void setTaskGroupId(Integer taskGroupId) {
+            this.taskGroupId = taskGroupId;
+        }
+
+        public void setWindowStart(Date windowStart) {
+            this.windowStart = windowStart;
+        }
+
+        public void setWindowEnd(Date windowEnd) {
+            this.windowEnd = windowEnd;
+        }
+
+        public void setJobStartTime(Date jobStartTime) {
+            this.jobStartTime = jobStartTime;
+        }
+
+        public void setJobEndTime(Date jobEndTime) {
+            this.jobEndTime = jobEndTime;
+        }
+
+        public void setJobRunTimeMs(Long jobRunTimeMs) {
+            this.jobRunTimeMs = jobRunTimeMs;
+        }
+
+        public void setJobPriority(Integer jobPriority) {
+            this.jobPriority = jobPriority;
+        }
+
+        public void setChannelNum(Integer channelNum) {
+            this.channelNum = channelNum;
+        }
+
+        public void setCluster(String cluster) {
+            this.cluster = cluster;
+        }
+
+        public void setJobDomain(String jobDomain) {
+            this.jobDomain = jobDomain;
+        }
+
+        public void setSrcType(String srcType) {
+            this.srcType = srcType;
+        }
+
+        public void setDstType(String dstType) {
+            this.dstType = dstType;
+        }
+
+        public void setSrcGuid(String srcGuid) {
+            this.srcGuid = srcGuid;
+        }
+
+        public void setDstGuid(String dstGuid) {
+            this.dstGuid = dstGuid;
+        }
+
+        public void setRecords(Long records) {
+            this.records = records;
+        }
+
+        public void setBytes(Long bytes) {
+            this.bytes = bytes;
+        }
+
+        public void setSpeedRecord(Long speedRecord) {
+            this.speedRecord = speedRecord;
+        }
+
+        public void setSpeedByte(Long speedByte) {
+            this.speedByte = speedByte;
+        }
+
+        public void setStagePercent(String stagePercent) {
+            this.stagePercent = stagePercent;
+        }
+
+        public void setErrorRecord(Long errorRecord) {
+            this.errorRecord = errorRecord;
+        }
+
+        public void setErrorBytes(Long errorBytes) {
+            this.errorBytes = errorBytes;
+        }
+
+        public void setWaitReadTimeMs(Long waitReadTimeMs) {
+            this.waitReadTimeMs = waitReadTimeMs;
+        }
+
+        public void setWaitWriteTimeMs(Long waitWriteTimeMs) {
+            this.waitWriteTimeMs = waitWriteTimeMs;
+        }
+
+        public void setOdpsBlockCloseTimeMs(Long odpsBlockCloseTimeMs) {
+            this.odpsBlockCloseTimeMs = odpsBlockCloseTimeMs;
+        }
+
+        public void setSqlQueryTimeMs(Long sqlQueryTimeMs) {
+            this.sqlQueryTimeMs = sqlQueryTimeMs;
+        }
+
+        public void setResultNextTimeMs(Long resultNextTimeMs) {
+            this.resultNextTimeMs = resultNextTimeMs;
+        }
+
+        public void setTaskTotalTimeMs(Long taskTotalTimeMs) {
+            this.taskTotalTimeMs = taskTotalTimeMs;
+        }
+
+        public void setHostAddress(String hostAddress) {
+            this.hostAddress = hostAddress;
+        }
+    }
+}

+ 412 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/statistics/VMInfo.java

@@ -0,0 +1,412 @@
+package cn.tr.plugin.dataX.common.statistics;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.lang.management.GarbageCollectorMXBean;
+import java.lang.management.MemoryPoolMXBean;
+import java.lang.management.OperatingSystemMXBean;
+import java.lang.management.RuntimeMXBean;
+import java.lang.reflect.Method;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Created by liqiang on 15/11/12.
+ */
+public class VMInfo {
+    private static final Logger LOG = LoggerFactory.getLogger(VMInfo.class);
+    static final long MB = 1024 * 1024;
+    static final long GB = 1024 * 1024 * 1024;
+    public static Object lock = new Object();
+    private static volatile VMInfo vmInfo;
+
+    /**
+     * @return the shared VMInfo instance, or null when initialization failed; a null result can safely be ignored by the job.
+     */
+    public static VMInfo getVmInfo() {
+        if (vmInfo == null) {
+            synchronized (lock) {
+                if (vmInfo == null) {
+                    try {
+                        vmInfo = new VMInfo();
+                    } catch (Exception e) {
+                        LOG.warn("VMInfo init failed and is ignored: " + e.getMessage(), e);
+                    }
+                }
+            }
+
+        }
+        return vmInfo;
+    }
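+
+    // Illustrative sketch (hypothetical helper): typical use of this singleton - obtain it,
+    // log the static snapshot, then periodically log deltas via the methods below.
+    private static void usageSketch() {
+        VMInfo vm = VMInfo.getVmInfo();  // may be null if MXBean initialization failed
+        if (vm != null) {
+            LOG.info(vm.toString());     // static OS/JVM/memory info
+            vm.getDelta(true);           // logs delta cpu/memory/gc info
+            LOG.info(vm.totalString());  // cumulative cpu/gc summary
+        }
+    }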
+
+    // MXBeans that supply the data
+    private final OperatingSystemMXBean osMXBean;
+    private final RuntimeMXBean runtimeMXBean;
+    private final List<GarbageCollectorMXBean> garbageCollectorMXBeanList;
+    private final List<MemoryPoolMXBean> memoryPoolMXBeanList;
+    /**
+     * Static information
+     */
+    private final String osInfo;
+    private final String jvmInfo;
+
+    /**
+     * Number of CPUs
+     */
+    private final int totalProcessorCount;
+
+    /**
+     * Machine status snapshots, used for intermediate logging and statistics reporting
+     */
+    private final PhyOSStatus startPhyOSStatus;
+    private final ProcessCpuStatus processCpuStatus = new ProcessCpuStatus();
+    private final ProcessGCStatus processGCStatus = new ProcessGCStatus();
+    private final ProcessMemoryStatus processMomoryStatus = new ProcessMemoryStatus();
+    //ms
+    private long lastUpTime = 0;
+    //nano
+    private long lastProcessCpuTime = 0;
+
+
+    private VMInfo() {
+        // Initialize static information
+        osMXBean = java.lang.management.ManagementFactory.getOperatingSystemMXBean();
+        runtimeMXBean = java.lang.management.ManagementFactory.getRuntimeMXBean();
+        garbageCollectorMXBeanList = java.lang.management.ManagementFactory.getGarbageCollectorMXBeans();
+        memoryPoolMXBeanList = java.lang.management.ManagementFactory.getMemoryPoolMXBeans();
+
+        osInfo = osMXBean.getName() + " " + osMXBean.getArch() + " " + osMXBean.getVersion();
+        jvmInfo = runtimeMXBean.getVmVendor() + " " + runtimeMXBean.getSpecVersion() + " " + runtimeMXBean.getVmVersion();
+        totalProcessorCount = osMXBean.getAvailableProcessors();
+
+        // Build startPhyOSStatus
+        startPhyOSStatus = new PhyOSStatus();
+        LOG.info("VMInfo# operatingSystem class => " + osMXBean.getClass().getName());
+        if (VMInfo.isSunOsMBean(osMXBean)) {
+            {
+                startPhyOSStatus.totalPhysicalMemory = VMInfo.getLongFromOperatingSystem(osMXBean, "getTotalPhysicalMemorySize");
+                startPhyOSStatus.freePhysicalMemory = VMInfo.getLongFromOperatingSystem(osMXBean, "getFreePhysicalMemorySize");
+                startPhyOSStatus.maxFileDescriptorCount = VMInfo.getLongFromOperatingSystem(osMXBean, "getMaxFileDescriptorCount");
+                startPhyOSStatus.currentOpenFileDescriptorCount = VMInfo.getLongFromOperatingSystem(osMXBean, "getOpenFileDescriptorCount");
+            }
+        }
+
+        // Initialize processGCStatus
+        for (GarbageCollectorMXBean garbage : garbageCollectorMXBeanList) {
+            GCStatus gcStatus = new GCStatus();
+            gcStatus.name = garbage.getName();
+            processGCStatus.gcStatusMap.put(garbage.getName(), gcStatus);
+        }
+
+        // Initialize the process memory status
+        if (memoryPoolMXBeanList != null && !memoryPoolMXBeanList.isEmpty()) {
+            for (MemoryPoolMXBean pool : memoryPoolMXBeanList) {
+                MemoryStatus memoryStatus = new MemoryStatus();
+                memoryStatus.name = pool.getName();
+                memoryStatus.initSize = pool.getUsage().getInit();
+                memoryStatus.maxSize = pool.getUsage().getMax();
+                processMomoryStatus.memoryStatusMap.put(pool.getName(), memoryStatus);
+            }
+        }
+    }
+
+    public String toString() {
+        return "the machine info  => \n\n"
+                + "\tosInfo:\t" + osInfo + "\n"
+                + "\tjvmInfo:\t" + jvmInfo + "\n"
+                + "\tcpu num:\t" + totalProcessorCount + "\n\n"
+                + startPhyOSStatus.toString() + "\n"
+                + processGCStatus.toString() + "\n"
+                + processMomoryStatus.toString() + "\n";
+    }
+
+    public String totalString() {
+        return (processCpuStatus.getTotalString() + processGCStatus.getTotalString());
+    }
+
+    public void getDelta() {
+        getDelta(true);
+    }
+
+    public synchronized void getDelta(boolean print) {
+
+        try {
+            if (VMInfo.isSunOsMBean(osMXBean)) {
+                long curUptime = runtimeMXBean.getUptime();
+                long curProcessTime = getLongFromOperatingSystem(osMXBean, "getProcessCpuTime");
+                // Percentage; uptime is in ms, processTime is in ns
+                if ((curUptime > lastUpTime) && (curProcessTime >= lastProcessCpuTime)) {
+                    float curDeltaCpu = (float) (curProcessTime - lastProcessCpuTime) / ((curUptime - lastUpTime) * totalProcessorCount * 10000);
+                    processCpuStatus.setMaxMinCpu(curDeltaCpu);
+                    processCpuStatus.averageCpu = (float) curProcessTime / (curUptime * totalProcessorCount * 10000);
+
+                    lastUpTime = curUptime;
+                    lastProcessCpuTime = curProcessTime;
+                }
+            }
+
+            for (GarbageCollectorMXBean garbage : garbageCollectorMXBeanList) {
+
+                GCStatus gcStatus = processGCStatus.gcStatusMap.get(garbage.getName());
+                if (gcStatus == null) {
+                    gcStatus = new GCStatus();
+                    gcStatus.name = garbage.getName();
+                    processGCStatus.gcStatusMap.put(garbage.getName(), gcStatus);
+                }
+
+                long curTotalGcCount = garbage.getCollectionCount();
+                gcStatus.setCurTotalGcCount(curTotalGcCount);
+
+                long curtotalGcTime = garbage.getCollectionTime();
+                gcStatus.setCurTotalGcTime(curtotalGcTime);
+            }
+
+            if (memoryPoolMXBeanList != null && !memoryPoolMXBeanList.isEmpty()) {
+                for (MemoryPoolMXBean pool : memoryPoolMXBeanList) {
+
+                    MemoryStatus memoryStatus = processMomoryStatus.memoryStatusMap.get(pool.getName());
+                    if (memoryStatus == null) {
+                        memoryStatus = new MemoryStatus();
+                        memoryStatus.name = pool.getName();
+                        processMomoryStatus.memoryStatusMap.put(pool.getName(), memoryStatus);
+                    }
+                    memoryStatus.commitedSize = pool.getUsage().getCommitted();
+                    memoryStatus.setMaxMinUsedSize(pool.getUsage().getUsed());
+                    long maxMemory = memoryStatus.commitedSize > 0 ? memoryStatus.commitedSize : memoryStatus.maxSize;
+                    memoryStatus.setMaxMinPercent(maxMemory > 0 ? (float) 100 * memoryStatus.usedSize / maxMemory : -1);
+                }
+            }
+
+            if (print) {
+                LOG.info(processCpuStatus.getDeltaString() + processMomoryStatus.getDeltaString() + processGCStatus.getDeltaString());
+            }
+
+        } catch (Exception e) {
+            LOG.warn("VMInfo getDelta failed and is ignored: " + e.getMessage(), e);
+        }
+    }
+
+    public static boolean isSunOsMBean(OperatingSystemMXBean operatingSystem) {
+        final String className = operatingSystem.getClass().getName();
+
+        return "com.sun.management.UnixOperatingSystem".equals(className);
+    }
+
+    public static long getLongFromOperatingSystem(OperatingSystemMXBean operatingSystem, String methodName) {
+        try {
+            final Method method = operatingSystem.getClass().getMethod(methodName, (Class<?>[]) null);
+            method.setAccessible(true);
+            return (Long) method.invoke(operatingSystem, (Object[]) null);
+        } catch (final Exception e) {
+            LOG.info(String.format("OperatingSystemMXBean %s failed, Exception = %s ", methodName, e.getMessage()));
+        }
+
+        return -1;
+    }
+
+    private class PhyOSStatus {
+        long totalPhysicalMemory = -1;
+        long freePhysicalMemory = -1;
+        long maxFileDescriptorCount = -1;
+        long currentOpenFileDescriptorCount = -1;
+
+        public String toString() {
+            return String.format("\ttotalPhysicalMemory:\t%,.2fG\n"
+                            + "\tfreePhysicalMemory:\t%,.2fG\n"
+                            + "\tmaxFileDescriptorCount:\t%s\n"
+                            + "\tcurrentOpenFileDescriptorCount:\t%s\n",
+                    (float) totalPhysicalMemory / GB, (float) freePhysicalMemory / GB, maxFileDescriptorCount, currentOpenFileDescriptorCount);
+        }
+    }
+
+    private class ProcessCpuStatus {
+        // Percentage value, e.g. 30.0 means 30.0%
+        float maxDeltaCpu = -1;
+        float minDeltaCpu = -1;
+        float curDeltaCpu = -1;
+        float averageCpu = -1;
+
+        public void setMaxMinCpu(float curCpu) {
+            this.curDeltaCpu = curCpu;
+            if (maxDeltaCpu < curCpu) {
+                maxDeltaCpu = curCpu;
+            }
+
+            if (minDeltaCpu == -1 || minDeltaCpu > curCpu) {
+                minDeltaCpu = curCpu;
+            }
+        }
+
+        public String getDeltaString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append("\n\t [delta cpu info] => \n");
+            sb.append("\t\t");
+            sb.append(String.format("%-30s | %-30s | %-30s | %-30s \n", "curDeltaCpu", "averageCpu", "maxDeltaCpu", "minDeltaCpu"));
+            sb.append("\t\t");
+            sb.append(String.format("%-30s | %-30s | %-30s | %-30s \n",
+                    String.format("%,.2f%%", processCpuStatus.curDeltaCpu),
+                    String.format("%,.2f%%", processCpuStatus.averageCpu),
+                    String.format("%,.2f%%", processCpuStatus.maxDeltaCpu),
+                    String.format("%,.2f%%\n", processCpuStatus.minDeltaCpu)));
+
+            return sb.toString();
+        }
+
+        public String getTotalString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append("\n\t [total cpu info] => \n");
+            sb.append("\t\t");
+            sb.append(String.format("%-30s | %-30s | %-30s \n", "averageCpu", "maxDeltaCpu", "minDeltaCpu"));
+            sb.append("\t\t");
+            sb.append(String.format("%-30s | %-30s | %-30s \n",
+                    String.format("%,.2f%%", processCpuStatus.averageCpu),
+                    String.format("%,.2f%%", processCpuStatus.maxDeltaCpu),
+                    String.format("%,.2f%%\n", processCpuStatus.minDeltaCpu)));
+
+            return sb.toString();
+        }
+
+    }
+
+    private class ProcessGCStatus {
+        final Map<String, GCStatus> gcStatusMap = new HashMap<String, GCStatus>();
+
+        public String toString() {
+            return "\tGC Names\t" + gcStatusMap.keySet() + "\n";
+        }
+
+        public String getDeltaString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append("\n\t [delta gc info] => \n");
+            sb.append("\t\t ");
+            sb.append(String.format("%-20s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s \n", "NAME", "curDeltaGCCount", "totalGCCount", "maxDeltaGCCount", "minDeltaGCCount", "curDeltaGCTime", "totalGCTime", "maxDeltaGCTime", "minDeltaGCTime"));
+            for (GCStatus gc : gcStatusMap.values()) {
+                sb.append("\t\t ");
+                sb.append(String.format("%-20s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s \n",
+                        gc.name, gc.curDeltaGCCount, gc.totalGCCount, gc.maxDeltaGCCount, gc.minDeltaGCCount,
+                        String.format("%,.3fs",(float)gc.curDeltaGCTime/1000),
+                        String.format("%,.3fs",(float)gc.totalGCTime/1000),
+                        String.format("%,.3fs",(float)gc.maxDeltaGCTime/1000),
+                        String.format("%,.3fs",(float)gc.minDeltaGCTime/1000)));
+
+            }
+            return sb.toString();
+        }
+
+        public String getTotalString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append("\n\t [total gc info] => \n");
+            sb.append("\t\t ");
+            sb.append(String.format("%-20s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s \n", "NAME", "totalGCCount", "maxDeltaGCCount", "minDeltaGCCount", "totalGCTime", "maxDeltaGCTime", "minDeltaGCTime"));
+            for (GCStatus gc : gcStatusMap.values()) {
+                sb.append("\t\t ");
+                sb.append(String.format("%-20s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s \n",
+                        gc.name, gc.totalGCCount, gc.maxDeltaGCCount, gc.minDeltaGCCount,
+                        String.format("%,.3fs",(float)gc.totalGCTime/1000),
+                        String.format("%,.3fs",(float)gc.maxDeltaGCTime/1000),
+                        String.format("%,.3fs",(float)gc.minDeltaGCTime/1000)));
+
+            }
+            return sb.toString();
+        }
+    }
+
+    private class ProcessMemoryStatus {
+        final Map<String, MemoryStatus> memoryStatusMap = new HashMap<String, MemoryStatus>();
+
+        public String toString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append("\t");
+            sb.append(String.format("%-30s | %-30s | %-30s \n", "MEMORY_NAME", "allocation_size", "init_size"));
+            for (MemoryStatus ms : memoryStatusMap.values()) {
+                sb.append("\t");
+                sb.append(String.format("%-30s | %-30s | %-30s \n",
+                        ms.name, String.format("%,.2fMB", (float) ms.maxSize / MB), String.format("%,.2fMB", (float) ms.initSize / MB)));
+            }
+            return sb.toString();
+        }
+
+        public String getDeltaString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append("\n\t [delta memory info] => \n");
+            sb.append("\t\t ");
+            sb.append(String.format("%-30s | %-30s | %-30s | %-30s | %-30s \n", "NAME", "used_size", "used_percent", "max_used_size", "max_percent"));
+            for (MemoryStatus ms : memoryStatusMap.values()) {
+                sb.append("\t\t ");
+                sb.append(String.format("%-30s | %-30s | %-30s | %-30s | %-30s \n",
+                        ms.name, String.format("%,.2f", (float) ms.usedSize / MB) + "MB",
+                        String.format("%,.2f", (float) ms.percent) + "%",
+                        String.format("%,.2f", (float) ms.maxUsedSize / MB) + "MB",
+                        String.format("%,.2f", (float) ms.maxpercent) + "%"));
+
+            }
+            return sb.toString();
+        }
+    }
+
+    private class GCStatus {
+        String name;
+        long maxDeltaGCCount = -1;
+        long minDeltaGCCount = -1;
+        long curDeltaGCCount;
+        long totalGCCount = 0;
+        long maxDeltaGCTime = -1;
+        long minDeltaGCTime = -1;
+        long curDeltaGCTime;
+        long totalGCTime = 0;
+
+        public void setCurTotalGcCount(long curTotalGcCount) {
+            this.curDeltaGCCount = curTotalGcCount - totalGCCount;
+            this.totalGCCount = curTotalGcCount;
+
+            if (maxDeltaGCCount < curDeltaGCCount) {
+                maxDeltaGCCount = curDeltaGCCount;
+            }
+
+            if (minDeltaGCCount == -1 || minDeltaGCCount > curDeltaGCCount) {
+                minDeltaGCCount = curDeltaGCCount;
+            }
+        }
+
+        public void setCurTotalGcTime(long curTotalGcTime) {
+            this.curDeltaGCTime = curTotalGcTime - totalGCTime;
+            this.totalGCTime = curTotalGcTime;
+
+            if (maxDeltaGCTime < curDeltaGCTime) {
+                maxDeltaGCTime = curDeltaGCTime;
+            }
+
+            if (minDeltaGCTime == -1 || minDeltaGCTime > curDeltaGCTime) {
+                minDeltaGCTime = curDeltaGCTime;
+            }
+        }
+    }
+
+    private class MemoryStatus {
+        String name;
+        long initSize;
+        long maxSize;
+        long commitedSize;
+        long usedSize;
+        float percent;
+        long maxUsedSize = -1;
+        float maxpercent = 0;
+
+        void setMaxMinUsedSize(long curUsedSize) {
+            if (maxUsedSize < curUsedSize) {
+                maxUsedSize = curUsedSize;
+            }
+            this.usedSize = curUsedSize;
+        }
+
+        void setMaxMinPercent(float curPercent) {
+            if (maxpercent < curPercent) {
+                maxpercent = curPercent;
+            }
+            this.percent = curPercent;
+        }
+    }
+
+}

+ 1073 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/Configuration.java

@@ -0,0 +1,1073 @@
+package cn.tr.plugin.dataX.common.util;
+
+import cn.hutool.core.io.FileUtil;
+import cn.hutool.core.io.IoUtil;
+import cn.hutool.core.util.ArrayUtil;
+import cn.hutool.json.JSONUtil;
+import cn.tr.core.exception.BaseCode;
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.core.utils.JsonUtils;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.hutool.core.util.StrUtil;
+import java.io.*;
+import java.nio.charset.Charset;
+import java.util.*;
+
+/**
+ * Configuration provides lossless storage of multi-level JSON configuration data. <br>
+ * <br>
+ * <p/>
+ * Example code:<br>
+ * <p/>
+ * Reading the job configuration:<br>
+ * Configuration configuration = Configuration.from(new File("Config.json")); <br>
+ * String jobContainerClass =
+ * configuration.getString("core.container.job.class"); <br>
+ * <p/>
+ * <br>
+ * Setting a nested List: <br>
+ * configuration.set("job.reader.parameter.jdbcUrl", Arrays.asList(new String[]
+ * {"jdbc", "jdbc"}));
+ * <p/>
+ * <p/>
+ * <br>
+ * <br>
+ * Merging Configurations: <br>
+ * configuration.merge(another);
+ * <p/>
+ * <p/>
+ * <br>
+ * <br>
+ * <br>
+ * <p/>
+ * There are two reasonable ways to implement Configuration:<br>
+ * the first flattens every key in the JSON document and stores it in a Map whose keys use the
+ * cascaded a.b.c form; <br>
+ * the second keeps the JSON object directly as a structured tree.<br>
+ * <p/>
+ * The second approach is used here. The problems with the first are: <br>
+ * 1. Inserting new objects is hard to handle. For example, given a.b.c="bazhen", inserting
+ * a="bazhen" means every existing entry under the root has to be discarded and replaced by the
+ * value "bazhen"; with flattened string keys this kind of update is awkward to express. <br>
+ * 2. Returning a subtree, e.g. for a.b.c.d = "bazhen", asking for everything under "a" yields a
+ * Map whose multi-level keys have to be merged back together. <br>
+ * 3. Producing JSON output requires converting the multi-level keys of that Map back into a tree
+ * before serializing. <br>
+ */
+public class Configuration {
+
+    /**
+     * Key paths that hold secrets are recorded here so that, in the distributed case,
+     * their values can later be encrypted and sent to the DataXServer.
+     */
+    private Set<String> secretKeyPathSet =
+            new HashSet<String>();
+
+    private Object root = null;
+
+    /**
+     * Create an empty Configuration
+     */
+    public static Configuration newDefault() {
+        return Configuration.from("{}");
+    }
+
+    /**
+     * Load a Configuration from a JSON string
+     */
+    public static Configuration from(String json) {
+        json = cn.tr.plugin.dataX.common.util.StrUtil.replaceVariable(json);
+        checkJSON(json);
+
+        try {
+            return new Configuration(json);
+        } catch (Exception e) {
+            throw DataXException.asDataXException(TRExcCode.CONFIG_ERROR,
+                    e);
+        }
+
+    }
+
+    /**
+     * Load a Configuration from a File containing JSON
+     */
+    public static Configuration from(File file) {
+        return Configuration.from(IoUtil.read(FileUtil.getInputStream(file), Charset.defaultCharset()));
+    }
+
+    /**
+     * Load a Configuration from an InputStream containing JSON
+     */
+    public static Configuration from(InputStream is) {
+        return Configuration.from(IoUtil.read(is, Charset.defaultCharset()));
+    }
+
+    /**
+     * Load a Configuration from a Map
+     */
+    public static Configuration from(final Map<String, Object> object) {
+        return Configuration.from(Configuration.toJSONString(object));
+    }
+
+    /**
+     * Load a Configuration from a List
+     */
+    public static Configuration from(final List<Object> object) {
+        return Configuration.from(Configuration.toJSONString(object));
+    }
+
+    public String getNecessaryValue(String key, BaseCode errorCode) {
+        String value = this.getString(key, null);
+        if (StrUtil.isBlank(value)) {
+            throw DataXException.asDataXException(errorCode,
+                    String.format("您提供配置文件有误,[%s]是必填参数,不允许为空或者留白 .", key));
+        }
+
+        return value;
+    }
+
+    public String getUnnecessaryValue(String key, String defaultValue, BaseCode errorCode) {
+        String value = this.getString(key, defaultValue);
+        if (StrUtil.isBlank(value)) {
+            value = defaultValue;
+        }
+        return value;
+    }
+
+    public Boolean getNecessaryBool(String key, BaseCode errorCode) {
+        Boolean value = this.getBool(key);
+        if (value == null) {
+            throw DataXException.asDataXException(errorCode,
+                    String.format("您提供配置文件有误,[%s]是必填参数,不允许为空或者留白 .", key));
+        }
+
+        return value;
+    }
+
+    /**
+     * Look up the object addressed by the given JSON path.
+     * <p/>
+     * <br>
+     * <p/>
+     * NOTE: only Map key and List index addressing are supported, for example:
+     * <p/>
+     * <br />
+     * <p/>
+     * for the JSON
+     * <p/>
+     * {"a": {"b": {"c": [0,1,2,3]}}}
+     * <p/>
+     * config.get("") returns the whole Map <br>
+     * config.get("a") returns the whole Map under a <br>
+     * config.get("a.b.c") returns the List addressed by c <br>
+     * config.get("a.b.c[0]") returns the number 0
+     *
+     * @return the JSON object as a Java value; null if the path or the object does not exist.
+     */
+    public Object get(final String path) {
+        this.checkPath(path);
+        try {
+            return this.findObject(path);
+        } catch (Exception e) {
+            return null;
+        }
+    }
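+
+    // Illustrative sketch (hypothetical helper): the path addressing documented above,
+    // using the sample JSON from the javadoc.
+    private static void getSketch() {
+        Configuration conf = Configuration.from("{\"a\": {\"b\": {\"c\": [0,1,2,3]}}}");
+        Object whole = conf.get("");         // the whole map, per the javadoc above
+        Object sub = conf.get("a");          // the map under "a"
+        Object list = conf.get("a.b.c");     // the list [0,1,2,3]
+        Object first = conf.get("a.b.c[0]"); // the number 0
+    }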
+
+    /**
+     * Return the sub-Configuration at the given partial path.
+     * <p/>
+     * <br>
+     * Returns null if the path or the object it addresses does not exist.
+     */
+    public Configuration getConfiguration(final String path) {
+        Object object = this.get(path);
+        if (null == object) {
+            return null;
+        }
+
+        return Configuration.from(Configuration.toJSONString(object));
+    }
+
+    /**
+     * Look up the String at the given JSON path.
+     *
+     * @return the String value, or null if the path or the String does not exist
+     */
+    public String getString(final String path) {
+        Object string = this.get(path);
+        if (null == string) {
+            return null;
+        }
+        return String.valueOf(string);
+    }
+
+    /**
+     * Look up the String at the given JSON path, falling back to a default value.
+     *
+     * @return the String value, or the default string if the path or the String does not exist
+     */
+    public String getString(final String path, final String defaultValue) {
+        String result = this.getString(path);
+
+        if (null == result) {
+            return defaultValue;
+        }
+
+        return result;
+    }
+
+    /**
+     * Look up the Character at the given JSON path.
+     *
+     * @return the Character value, or null if the path or the Character does not exist
+     */
+    public Character getChar(final String path) {
+        String result = this.getString(path);
+        if (null == result) {
+            return null;
+        }
+
+        try {
+            if (StrUtil.isEmpty(result)) {
+                throw new IllegalArgumentException("The String must not be empty");
+            } else {
+                return result.charAt(0);
+            }
+        } catch (Exception e) {
+            throw DataXException.asDataXException(
+                    TRExcCode.CONFIG_ERROR,
+                    String.format("任务读取配置文件出错. 因为配置文件路径[%s] 值非法,期望是字符类型: %s. 请检查您的配置并作出修改.", path,
+                            e.getMessage()));
+        }
+    }
+
+    /**
+     * Look up the Character at the given JSON path, falling back to a default value.
+     *
+     * @return the Character value, or the default Character if the path or the Character does not exist
+     */
+    public Character getChar(final String path, char defaultValue) {
+        Character result = this.getChar(path);
+        if (null == result) {
+            return defaultValue;
+        }
+        return result;
+    }
+
+    /**
+     * Look up the Boolean at the given JSON path.
+     *
+     * @return the Boolean value; a value other than true/false raises an error. Note that null is returned when the path does not exist.
+     */
+    public Boolean getBool(final String path) {
+        String result = this.getString(path);
+
+        if (null == result) {
+            return null;
+        } else if ("true".equalsIgnoreCase(result)) {
+            return Boolean.TRUE;
+        } else if ("false".equalsIgnoreCase(result)) {
+            return Boolean.FALSE;
+        } else {
+            throw DataXException.asDataXException(TRExcCode.CONFIG_ERROR,
+                    String.format("您提供的配置信息有误,因为从[%s]获取的值[%s]无法转换为bool类型. 请检查源表的配置并且做出相应的修改.",
+                            path, result));
+        }
+
+    }
+
+    /**
+     * Look up the Boolean at the given JSON path, falling back to a default value.
+     *
+     * @return the Boolean value, or the default Boolean if the path or the Boolean does not exist
+     */
+    public Boolean getBool(final String path, boolean defaultValue) {
+        Boolean result = this.getBool(path);
+        if (null == result) {
+            return defaultValue;
+        }
+        return result;
+    }
+
+    /**
+     * Look up the Integer at the given JSON path.
+     *
+     * @return the Integer value, or null if the path or the Integer does not exist
+     */
+    public Integer getInt(final String path) {
+        String result = this.getString(path);
+        if (null == result) {
+            return null;
+        }
+
+        try {
+            return Integer.valueOf(result);
+        } catch (Exception e) {
+            throw DataXException.asDataXException(
+                    TRExcCode.CONFIG_ERROR,
+                    String.format("任务读取配置文件出错. 配置文件路径[%s] 值非法, 期望是整数类型: %s. 请检查您的配置并作出修改.", path,
+                            e.getMessage()));
+        }
+    }
+
+    /**
+     * Look up the Integer at the given JSON path, falling back to a default value.
+     *
+     * @return the Integer value, or the default Integer if the path or the Integer does not exist
+     */
+    public Integer getInt(final String path, int defaultValue) {
+        Integer object = this.getInt(path);
+        if (null == object) {
+            return defaultValue;
+        }
+        return object;
+    }
+
+    /**
+     * Look up the Long at the given JSON path.
+     *
+     * @return the Long value, or null if the path or the Long does not exist
+     */
+    public Long getLong(final String path) {
+        String result = this.getString(path);
+        if (StrUtil.isBlank(result)) {
+            return null;
+        }
+
+        try {
+            return Long.valueOf(result);
+        } catch (Exception e) {
+            throw DataXException.asDataXException(
+                    TRExcCode.CONFIG_ERROR,
+                    String.format("任务读取配置文件出错. 配置文件路径[%s] 值非法, 期望是整数类型: %s. 请检查您的配置并作出修改.", path,
+                            e.getMessage()));
+        }
+    }
+
+    /**
+     * Look up the Long at the given JSON path, falling back to a default value.
+     *
+     * @return the Long value, or the default Long if the path or the Long does not exist
+     */
+    public Long getLong(final String path, long defaultValue) {
+        Long result = this.getLong(path);
+        if (null == result) {
+            return defaultValue;
+        }
+        return result;
+    }
+
+    /**
+     * Look up the Double at the given JSON path.
+     *
+     * @return the Double value, or null if the path or the Double does not exist
+     */
+    public Double getDouble(final String path) {
+        String result = this.getString(path);
+        if (StrUtil.isBlank(result)) {
+            return null;
+        }
+
+        try {
+            return Double.valueOf(result);
+        } catch (Exception e) {
+            throw DataXException.asDataXException(
+                    TRExcCode.CONFIG_ERROR,
+                    String.format("任务读取配置文件出错. 配置文件路径[%s] 值非法, 期望是浮点类型: %s. 请检查您的配置并作出修改.", path,
+                            e.getMessage()));
+        }
+    }
+
+    /**
+     * Look up the Double at the given JSON path, falling back to a default value.
+     *
+     * @return the Double value, or the default Double if the path or the Double does not exist
+     */
+    public Double getDouble(final String path, double defaultValue) {
+        Double result = this.getDouble(path);
+        if (null == result) {
+            return defaultValue;
+        }
+        return result;
+    }
+
+    /**
+     * Look up the List at the given JSON path; returns null if the object does not exist
+     */
+    @SuppressWarnings("unchecked")
+    public List<Object> getList(final String path) {
+        List<Object> list = this.get(path, List.class);
+        if (null == list) {
+            return null;
+        }
+        return list;
+    }
+
+    public <T> List<T> getListWithJson(final String path, Class<T> t) {
+        Object object = this.get(path, List.class);
+        if (null == object) {
+            return null;
+        }
+        return JsonUtils.parseArray(JsonUtils.toJsonString(object),t);
+    }
+
+    /**
+     * Look up the List at the given JSON path; returns null if the object does not exist
+     */
+    @SuppressWarnings("unchecked")
+    public <T> List<T> getList(final String path, Class<T> t) {
+        Object object = this.get(path, List.class);
+        if (null == object) {
+            return null;
+        }
+
+        List<T> result = new ArrayList<T>();
+
+        List<Object> origin = (List<Object>) object;
+        for (final Object each : origin) {
+            result.add((T) each);
+        }
+
+        return result;
+    }
+
+    /**
+     * Look up the List at the given JSON path; returns the default List if the object does not exist
+     */
+    @SuppressWarnings("unchecked")
+    public List<Object> getList(final String path,
+                                final List<Object> defaultList) {
+        Object object = this.getList(path);
+        if (null == object) {
+            return defaultList;
+        }
+        return (List<Object>) object;
+    }
+
+    /**
+     * Look up the List at the given JSON path; returns the default List if the object does not exist
+     */
+    public <T> List<T> getList(final String path, final List<T> defaultList,
+                               Class<T> t) {
+        List<T> list = this.getList(path, t);
+        if (null == list) {
+            return defaultList;
+        }
+        return list;
+    }
+
+    /**
+     * Look up a List of Configuration objects at the given JSON path; returns null if the object does not exist
+     */
+    public List<Configuration> getListConfiguration(final String path) {
+        List<Object> lists = getList(path);
+        if (lists == null) {
+            return null;
+        }
+
+        List<Configuration> result = new ArrayList<Configuration>();
+        for (final Object object : lists) {
+            result.add(Configuration.from(Configuration.toJSONString(object)));
+        }
+        return result;
+    }
+
+    /**
+     * Look up the Map at the given JSON path; returns null if the object does not exist
+     */
+    @SuppressWarnings("unchecked")
+    public Map<String, Object> getMap(final String path) {
+        Map<String, Object> result = this.get(path, Map.class);
+        if (null == result) {
+            return null;
+        }
+        return result;
+    }
+
+    /**
+     * Look up the Map at the given JSON path; returns null if the object does not exist
+     */
+    @SuppressWarnings("unchecked")
+    public <T> Map<String, T> getMap(final String path, Class<T> t) {
+        Map<String, Object> map = this.get(path, Map.class);
+        if (null == map) {
+            return null;
+        }
+
+        Map<String, T> result = new HashMap<String, T>();
+        for (final String key : map.keySet()) {
+            result.put(key, (T) map.get(key));
+        }
+
+        return result;
+    }
+
+    /**
+     * Look up the Map at the given JSON path; returns the default Map if the object does not exist
+     */
+    @SuppressWarnings("unchecked")
+    public Map<String, Object> getMap(final String path,
+                                      final Map<String, Object> defaultMap) {
+        Object object = this.getMap(path);
+        if (null == object) {
+            return defaultMap;
+        }
+        return (Map<String, Object>) object;
+    }
+
+    /**
+     * Look up the Map at the given JSON path; returns the default Map if the object does not exist
+     */
+    public <T> Map<String, T> getMap(final String path,
+                                     final Map<String, T> defaultMap, Class<T> t) {
+        Map<String, T> result = getMap(path, t);
+        if (null == result) {
+            return defaultMap;
+        }
+        return result;
+    }
+
+    /**
+     * Look up a Map of Configuration objects at the given JSON path; returns null if the object does not exist
+     */
+    @SuppressWarnings("unchecked")
+    public Map<String, Configuration> getMapConfiguration(final String path) {
+        Map<String, Object> map = this.get(path, Map.class);
+        if (null == map) {
+            return null;
+        }
+
+        Map<String, Configuration> result = new HashMap<String, Configuration>();
+        for (final String key : map.keySet()) {
+            result.put(key, Configuration.from(Configuration.toJSONString(map
+                    .get(key))));
+        }
+
+        return result;
+    }
+
+    /**
+     * Look up the object addressed by the given JSON path and cast it to the requested type.
+     * <p/>
+     * <br>
+     * <p/>
+     * NOTE: only Map key and List index addressing are supported, for example:
+     * <p/>
+     * <br />
+     * <p/>
+     * for the JSON
+     * <p/>
+     * {"a": {"b": {"c": [0,1,2,3]}}}
+     * <p/>
+     * config.get("") returns the whole Map <br>
+     * config.get("a") returns the whole Map under a <br>
+     * config.get("a.b.c") returns the List addressed by c <br>
+     * config.get("a.b.c[0]") returns the number 0
+     *
+     * @return the JSON object as a Java value; an exception is thrown if the cast fails
+     */
+    @SuppressWarnings("unchecked")
+    public <T> T get(final String path, Class<T> clazz) {
+        this.checkPath(path);
+        return (T) this.get(path);
+    }
+
+    /**
+     * Pretty-print this Configuration as formatted JSON
+     */
+    public String beautify() {
+        return JSONUtil.toJsonPrettyStr(this.getInternal());
+    }
+
+    /**
+     * Insert the given object at the given JSON path and return the object previously stored
+     * there, if any.
+     * <p/>
+     * <br>
+     * <p/>
+     * Only dot and array index addressing are supported, for example:
+     * <p/>
+     * <br />
+     * <p/>
+     * config.set("a.b.c[3]", object);
+     * <p/>
+     * <br>
+     * Configuration places no restriction on the inserted object, but please make sure it is a
+     * simple object (including Map<String, Object> and List<Object>). Do not use custom classes,
+     * otherwise later JSON serialization will show undefined behaviour.
+     *
+     * @param path
+     *            the JSON path
+     * @param object
+     *            the object to insert
+     * @return the object previously stored at the path, as a Java value
+     */
+    public Object set(final String path, final Object object) {
+        checkPath(path);
+
+        Object result = this.get(path);
+
+        setObject(path, extractConfiguration(object));
+
+        return result;
+    }
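+
+    // Illustrative sketch (hypothetical helper): inserting a value at a list index as in the
+    // javadoc example above; set returns the value previously stored at that path.
+    private static void setSketch() {
+        Configuration conf = Configuration.from("{\"a\": {\"b\": {\"c\": [0,1,2]}}}");
+        conf.set("a.b.c[3]", 3);                   // the javadoc example: set a list element by index
+        Object previous = conf.set("a.b.c[0]", 9); // previous holds the old value at that path (0)
+    }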
+
+    /**
+     * Return the keys of all leaf nodes of this Configuration.
+     * <p/>
+     * <br>
+     * <p/>
+     * For<br>
+     * <p/>
+     * {"a": {"b": {"c": [0,1,2,3]}}, "x": "y"}
+     * <p/>
+     * the keys are: a.b.c[0], a.b.c[1], a.b.c[2], a.b.c[3], x
+     */
+    public Set<String> getKeys() {
+        Set<String> collect = new HashSet<String>();
+        this.getKeysRecursive(this.getInternal(), "", collect);
+        return collect;
+    }
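+
+    // Illustrative sketch (hypothetical helper): for the JSON from the javadoc above,
+    // getKeys() returns the flattened leaf paths.
+    private static void keysSketch() {
+        Configuration conf = Configuration.from("{\"a\": {\"b\": {\"c\": [0,1,2,3]}}, \"x\": \"y\"}");
+        Set<String> keys = conf.getKeys(); // a.b.c[0], a.b.c[1], a.b.c[2], a.b.c[3], x
+    }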
+
+    /**
+     * Remove the value at the given path; an exception is thrown if the path does not exist.
+     */
+    public Object remove(final String path) {
+        final Object result = this.get(path);
+        if (null == result) {
+            throw DataXException.asDataXException(
+                    TRExcCode.RUNTIME_ERROR,
+                    String.format("配置文件对应Key[%s]并不存在,该情况是代码编程错误. 请联系DataX团队的同学.", path));
+        }
+
+        this.set(path, null);
+        return result;
+    }
+
+    /**
+     * Merge another Configuration into this one, resolving conflicting key/value pairs.
+     *
+     * @param another
+     *            the Configuration to merge in
+     * @param updateWhenConflict
+     *            when both sides contain the same key, whether to overwrite the current value or ignore the incoming one
+     * @return the merged Configuration
+     */
+    public Configuration merge(final Configuration another,
+                               boolean updateWhenConflict) {
+        Set<String> keys = another.getKeys();
+
+        for (final String key : keys) {
+            // Update strategy: every key present in "another" overwrites the current value.
+            if (updateWhenConflict) {
+                this.set(key, another.get(key));
+                continue;
+            }
+
+            // Ignore strategy: only keys that exist in "another" but not in this Configuration are copied.
+            boolean isCurrentExists = this.get(key) != null;
+            if (isCurrentExists) {
+                continue;
+            }
+
+            this.set(key, another.get(key));
+        }
+        return this;
+    }
+
+    @Override
+    public String toString() {
+        return this.toJSON();
+    }
+
+    /**
+     * Serializes this Configuration as JSON.
+     */
+    public String toJSON() {
+        return Configuration.toJSONString(this.getInternal());
+    }
+
+    /**
+     * Copies this Configuration. Note that a deep copy is used to avoid shared-state conflicts.
+     */
+    public Configuration clone() {
+        Configuration config = Configuration
+                .from(Configuration.toJSONString(this.getInternal()));
+        config.addSecretKeyPath(this.secretKeyPathSet);
+        return config;
+    }
+
+    /**
+     * Registers a secret-key path in the format Configuration expects, for example:
+     * a.b.c
+     * a.b[2].c
+     * @param path the secret-key path
+     */
+    public void addSecretKeyPath(String path) {
+        if(StrUtil.isNotBlank(path)) {
+            this.secretKeyPathSet.add(path);
+        }
+    }
+
+    public void addSecretKeyPath(Set<String> pathSet) {
+        if(pathSet != null) {
+            this.secretKeyPathSet.addAll(pathSet);
+        }
+    }
+
+    public void setSecretKeyPathSet(Set<String> keyPathSet) {
+        if(keyPathSet != null) {
+            this.secretKeyPathSet = keyPathSet;
+        }
+    }
+
+    public boolean isSecretPath(String path) {
+        return this.secretKeyPathSet.contains(path);
+    }
+
+    @SuppressWarnings("unchecked")
+    void getKeysRecursive(final Object current, String path, Set<String> collect) {
+        boolean isRegularElement = !(current instanceof Map || current instanceof List);
+        if (isRegularElement) {
+            collect.add(path);
+            return;
+        }
+
+        boolean isMap = current instanceof Map;
+        if (isMap) {
+            Map<String, Object> mapping = ((Map<String, Object>) current);
+            for (final String key : mapping.keySet()) {
+                if (StrUtil.isBlank(path)) {
+                    getKeysRecursive(mapping.get(key), key.trim(), collect);
+                } else {
+                    getKeysRecursive(mapping.get(key), path + "." + key.trim(),
+                            collect);
+                }
+            }
+            return;
+        }
+
+        boolean isList = current instanceof List;
+        if (isList) {
+            List<Object> lists = (List<Object>) current;
+            for (int i = 0; i < lists.size(); i++) {
+                getKeysRecursive(lists.get(i), path + String.format("[%d]", i),
+                        collect);
+            }
+            return;
+        }
+
+        return;
+    }
+
+    public Object getInternal() {
+        return this.root;
+    }
+
+    private void setObject(final String path, final Object object) {
+        Object newRoot = setObjectRecursive(this.root, split2List(path), 0,
+                object);
+
+        if (isSuitForRoot(newRoot)) {
+            this.root = newRoot;
+            return;
+        }
+
+        throw DataXException.asDataXException(TRExcCode.RUNTIME_ERROR,
+                String.format("值[%s]无法适配您提供[%s], 该异常代表系统编程错误, 请联系DataX开发团队!",
+                        JsonUtils.toJsonString(object), path));
+    }
+
+    @SuppressWarnings("unchecked")
+    private Object extractConfiguration(final Object object) {
+        if (object instanceof Configuration) {
+            return extractFromConfiguration(object);
+        }
+
+        if (object instanceof List) {
+            List<Object> result = new ArrayList<Object>();
+            for (final Object each : (List<Object>) object) {
+                result.add(extractFromConfiguration(each));
+            }
+            return result;
+        }
+
+        if (object instanceof Map) {
+            Map<String, Object> result = new HashMap<String, Object>();
+            for (final String key : ((Map<String, Object>) object).keySet()) {
+                result.put(key,
+                        extractFromConfiguration(((Map<String, Object>) object)
+                                .get(key)));
+            }
+            return result;
+        }
+
+        return object;
+    }
+
+    private Object extractFromConfiguration(final Object object) {
+        if (object instanceof Configuration) {
+            return ((Configuration) object).getInternal();
+        }
+
+        return object;
+    }
+
+    Object buildObject(final List<String> paths, final Object object) {
+        if (null == paths) {
+            throw DataXException.asDataXException(
+                    TRExcCode.RUNTIME_ERROR,
+                    "Path不能为null,该异常代表系统编程错误, 请联系DataX开发团队 !");
+        }
+
+        if (1 == paths.size() && StrUtil.isBlank(paths.get(0))) {
+            return object;
+        }
+
+        Object child = object;
+        for (int i = paths.size() - 1; i >= 0; i--) {
+            String path = paths.get(i);
+
+            if (isPathMap(path)) {
+                Map<String, Object> mapping = new HashMap<String, Object>();
+                mapping.put(path, child);
+                child = mapping;
+                continue;
+            }
+
+            if (isPathList(path)) {
+                List<Object> lists = new ArrayList<Object>(
+                        this.getIndex(path) + 1);
+                expand(lists, this.getIndex(path) + 1);
+                lists.set(this.getIndex(path), child);
+                child = lists;
+                continue;
+            }
+
+            throw DataXException.asDataXException(
+                    TRExcCode.RUNTIME_ERROR, String.format(
+                            "路径[%s]出现非法值类型[%s],该异常代表系统编程错误, 请联系DataX开发团队! .",
+                            ArrayUtil.join(paths, "."), path));
+        }
+
+        return child;
+    }
+
+    @SuppressWarnings("unchecked")
+    Object setObjectRecursive(Object current, final List<String> paths,
+                              int index, final Object value) {
+
+        // Past the end of the path: return the value itself as the bottom-level leaf node.
+        boolean isLastIndex = index == paths.size();
+        if (isLastIndex) {
+            return value;
+        }
+
+        String path = paths.get(index).trim();
+        boolean isNeedMap = isPathMap(path);
+        if (isNeedMap) {
+            Map<String, Object> mapping;
+
+            // The current node is not a map: replace it entirely and return the newly built map.
+            boolean isCurrentMap = current instanceof Map;
+            if (!isCurrentMap) {
+                mapping = new HashMap<String, Object>();
+                mapping.put(
+                        path,
+                        buildObject(paths.subList(index + 1, paths.size()),
+                                value));
+                return mapping;
+            }
+
+            // The current node is a map without this key: build the object, put it into the map, and return the map.
+            mapping = ((Map<String, Object>) current);
+            boolean hasSameKey = mapping.containsKey(path);
+            if (!hasSameKey) {
+                mapping.put(
+                        path,
+                        buildObject(paths.subList(index + 1, paths.size()),
+                                value));
+                return mapping;
+            }
+
+            // The current node is a map and the key already exists: keep recursing.
+            current = mapping.get(path);
+            mapping.put(path,
+                    setObjectRecursive(current, paths, index + 1, value));
+            return mapping;
+        }
+
+        boolean isNeedList = isPathList(path);
+        if (isNeedList) {
+            List<Object> lists;
+            int listIndexer = getIndex(path);
+
+            // The current node is not a list: build a new list and return it.
+            boolean isCurrentList = current instanceof List;
+            if (!isCurrentList) {
+                lists = expand(new ArrayList<Object>(), listIndexer + 1);
+                lists.set(
+                        listIndexer,
+                        buildObject(paths.subList(index + 1, paths.size()),
+                                value));
+                return lists;
+            }
+
+            // The current node is a list but the target index holds no value: build the object, set it at that index, and return the list.
+            lists = (List<Object>) current;
+            lists = expand(lists, listIndexer + 1);
+
+            boolean hasSameIndex = lists.get(listIndexer) != null;
+            if (!hasSameIndex) {
+                lists.set(
+                        listIndexer,
+                        buildObject(paths.subList(index + 1, paths.size()),
+                                value));
+                return lists;
+            }
+
+            // The current node is a list and the index already holds a value: keep recursing.
+            current = lists.get(listIndexer);
+            lists.set(listIndexer,
+                    setObjectRecursive(current, paths, index + 1, value));
+            return lists;
+        }
+
+        throw DataXException.asDataXException(TRExcCode.RUNTIME_ERROR,
+                "该异常代表系统编程错误, 请联系DataX开发团队 !");
+    }
+
+    private Object findObject(final String path) {
+        boolean isRootQuery = StrUtil.isBlank(path);
+        if (isRootQuery) {
+            return this.root;
+        }
+
+        Object target = this.root;
+
+        for (final String each : split2List(path)) {
+            if (isPathMap(each)) {
+                target = findObjectInMap(target, each);
+                continue;
+            } else {
+                target = findObjectInList(target, each);
+                continue;
+            }
+        }
+
+        return target;
+    }
+
+    @SuppressWarnings("unchecked")
+    private Object findObjectInMap(final Object target, final String index) {
+        boolean isMap = (target instanceof Map);
+        if (!isMap) {
+            throw new IllegalArgumentException(String.format(
+                    "您提供的配置文件有误. 路径[%s]需要配置Json格式的Map对象,但该节点发现实际类型是[%s]. 请检查您的配置并作出修改.",
+                    index, target.getClass().toString()));
+        }
+
+        Object result = ((Map<String, Object>) target).get(index);
+        if (null == result) {
+            throw new IllegalArgumentException(String.format(
+                    "您提供的配置文件有误. 路径[%s]值为null,datax无法识别该配置. 请检查您的配置并作出修改.", index));
+        }
+
+        return result;
+    }
+
+    @SuppressWarnings({ "unchecked" })
+    private Object findObjectInList(final Object target, final String each) {
+        boolean isList = (target instanceof List);
+        if (!isList) {
+            throw new IllegalArgumentException(String.format(
+                    "您提供的配置文件有误. 路径[%s]需要配置Json格式的Map对象,但该节点发现实际类型是[%s]. 请检查您的配置并作出修改.",
+                    each, target.getClass().toString()));
+        }
+
+        String index = each.replace("[", "").replace("]", "");
+        if (!StrUtil.isNumeric(index)) {
+            throw new IllegalArgumentException(
+                    String.format(
+                            "系统编程错误,列表下标必须为数字类型,但该节点发现实际类型是[%s] ,该异常代表系统编程错误, 请联系DataX开发团队 !",
+                            index));
+        }
+
+        return ((List<Object>) target).get(Integer.valueOf(index));
+    }
+
+    private List<Object> expand(List<Object> list, int size) {
+        int expand = size - list.size();
+        while (expand-- > 0) {
+            list.add(null);
+        }
+        return list;
+    }
+
+    private boolean isPathList(final String path) {
+        return path.contains("[") && path.contains("]");
+    }
+
+    private boolean isPathMap(final String path) {
+        return StrUtil.isNotBlank(path) && !isPathList(path);
+    }
+
+    private int getIndex(final String index) {
+        return Integer.valueOf(index.replace("[", "").replace("]", ""));
+    }
+
+    private boolean isSuitForRoot(final Object object) {
+        if (null != object && (object instanceof List || object instanceof Map)) {
+            return true;
+        }
+
+        return false;
+    }
+
+    private String split(final String path) {
+        return StrUtil.replace(path, "[", ".[");
+    }
+
+    private List<String> split2List(final String path) {
+        return  StrUtil.split(split(path), ".");
+    }
+
+    private void checkPath(final String path) {
+        if (null == path) {
+            throw new IllegalArgumentException(
+                    "系统编程错误, 该异常代表系统编程错误, 请联系DataX开发团队!.");
+        }
+
+        for (final String each : StrUtil.split(path,".")) {
+            if (StrUtil.isBlank(each)) {
+                throw new IllegalArgumentException(String.format(
+                        "系统编程错误, 路径[%s]不合法, 路径层次之间不能出现空白字符 .", path));
+            }
+        }
+    }
+
+    @SuppressWarnings("unused")
+    private String toJSONPath(final String path) {
+        return (StrUtil.isBlank(path) ? "$" : "$." + path).replace("$.[",
+                "$[");
+    }
+
+    private static void checkJSON(final String json) {
+        if (StrUtil.isBlank(json)) {
+            throw DataXException.asDataXException(TRExcCode.CONFIG_ERROR,
+                    "配置信息错误. 因为您提供的配置信息不是合法的JSON格式, JSON不能为空白. 请按照标准json格式提供配置信息. ");
+        }
+    }
+
+    private Configuration(final String json) {
+        try {
+            this.root = JSONUtil.parse(json);
+        } catch (Exception e) {
+            throw DataXException.asDataXException(TRExcCode.CONFIG_ERROR,
+                    String.format("配置信息错误. 您提供的配置信息不是合法的JSON格式: %s . 请按照标准json格式提供配置信息. ", e.getMessage()));
+        }
+    }
+
+    private static String toJSONString(final Object object) {
+        return JsonUtils.toJsonString(object);
+    }
+
+    public Set<String> getSecretKeyPathSet() {
+        return secretKeyPathSet;
+    }
+}
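
A minimal usage sketch of the JSON-path addressing, getKeys and merge semantics implemented above (illustrative only, not part of this changeset; the JSON literals and the expected results in the comments are assumptions):

    // Assumes Configuration.from(String), declared earlier in this class.
    Configuration conf = Configuration.from("{\"a\": {\"b\": {\"c\": [0,1,2,3]}}, \"x\": \"y\"}");
    conf.get("a.b.c[0]");                        // -> 0
    conf.getKeys();                              // -> a.b.c[0] ... a.b.c[3], x
    Object previous = conf.set("a.b.c[0]", 9);   // returns the old value 0
    Configuration other = Configuration.from("{\"x\": \"z\", \"k\": 1}");
    conf.merge(other, false);                    // keeps the existing "x", adds "k"
    conf.merge(other, true);                     // overwrites "x" with "z"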

+ 37 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/ConfigurationUtil.java

@@ -0,0 +1,37 @@
+package cn.tr.plugin.dataX.common.util;
+
+import cn.hutool.core.util.StrUtil;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+
+public class ConfigurationUtil {
+    private static final List<String> SENSITIVE_KEYS = Arrays.asList("password", "accessKey", "securityToken",
+        "AccessKeyId", "AccessKeySecert", "AccessKeySecret", "clientPassword");
+
+    public static Configuration filterSensitive(Configuration origin) {
+        // The configuration metric of a shell task may be null.
+        if (origin == null) {
+            return origin;
+        }
+        // Clone first so the input object is not modified.
+        Configuration configuration = origin.clone();
+        Set<String> keys = configuration.getKeys();
+        for (final String key : keys) {
+            boolean isSensitive = false;
+            for (String sensitiveKey : SENSITIVE_KEYS) {
+                if (StrUtil.endWithIgnoreCase(key, sensitiveKey)) {
+                    isSensitive = true;
+                    break;
+                }
+            }
+
+            if (isSensitive && configuration.get(key) instanceof String) {
+                configuration.set(key, configuration.getString(key).replaceAll(".", "*"));
+            }
+
+        }
+        return configuration;
+    }
+}
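
A short sketch of how filterSensitive masks values (illustrative only; the job configuration below is hypothetical):

    Configuration job = Configuration.from(
            "{\"writer\": {\"username\": \"root\", \"password\": \"secret\"}}");
    Configuration masked = ConfigurationUtil.filterSensitive(job);
    masked.getString("writer.password");   // "******" – every character replaced by '*'
    job.getString("writer.password");      // still "secret" – the input is cloned before masking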

+ 228 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/DESCipher.java

@@ -0,0 +1,228 @@
+/**
+ *  (C) 2010-2022 Alibaba Group Holding Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package cn.tr.plugin.dataX.common.util;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.crypto.Cipher;
+import javax.crypto.SecretKey;
+import javax.crypto.SecretKeyFactory;
+import javax.crypto.spec.DESKeySpec;
+import java.security.SecureRandom;
+
+/**
+ * DES encryption/decryption, interoperable with Delphi (string encoding must be unified as UTF-8).
+ * Extracted into common so the utility can be reused by later code.
+ */
+public class DESCipher {
+	private static Logger LOGGER = LoggerFactory.getLogger(DESCipher.class);
+	/**
+	 * Secret key
+	 */
+	public static final String KEY = "";
+	private final static String DES = "DES";
+
+	/**
+	 * Encrypt.
+	 * @param src plaintext (bytes)
+	 * @param key secret key; its length must be a multiple of 8
+	 * @return ciphertext (bytes)
+	 * @throws Exception
+	 */
+	public static byte[] encrypt(byte[] src, byte[] key) throws Exception {
+		// The DES algorithm requires a trusted source of randomness.
+		SecureRandom sr = new SecureRandom();
+		
+		// Create a DESKeySpec from the raw key material.
+		DESKeySpec dks = new DESKeySpec(key);
+		
+		// Create a key factory and use it to convert the DESKeySpec into a SecretKey object.
+		SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(DES);
+		SecretKey securekey = keyFactory.generateSecret(dks);
+		
+		// The Cipher object performs the actual encryption.
+		Cipher cipher = Cipher.getInstance(DES);
+
+		// Initialize the Cipher with the key.
+		cipher.init(Cipher.ENCRYPT_MODE, securekey, sr);
+
+		// Take the data and perform the actual encryption.
+		return cipher.doFinal(src);
+	}
+
+	/**
+	 * Decrypt.
+	 * @param src ciphertext (bytes)
+	 * @param key secret key; its length must be a multiple of 8
+	 * @return plaintext (bytes)
+	 * @throws Exception
+	 */
+	public static byte[] decrypt(byte[] src, byte[] key) throws Exception {
+		// The DES algorithm requires a trusted source of randomness.
+		SecureRandom sr = new SecureRandom();
+
+		// Create a DESKeySpec from the raw key material.
+		DESKeySpec dks = new DESKeySpec(key);
+
+		// Create a key factory and use it to convert the DESKeySpec into a SecretKey object.
+		SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(DES);
+		SecretKey securekey = keyFactory.generateSecret(dks);
+
+		// The Cipher object performs the actual decryption.
+		Cipher cipher = Cipher.getInstance(DES);
+
+		// Initialize the Cipher with the key.
+		cipher.init(Cipher.DECRYPT_MODE, securekey, sr);
+
+		// Take the data and perform the actual decryption.
+		return cipher.doFinal(src);
+	}
+
+	/**
+	 * Encrypt with the default key.
+	 * @param src plaintext (bytes)
+	 * @return ciphertext (bytes)
+	 * @throws Exception
+	 */
+	public static byte[] encrypt(byte[] src) throws Exception {
+		return encrypt(src, KEY.getBytes());
+	}
+
+	/**
+	 * Decrypt with the default key.
+	 * @param src ciphertext (bytes)
+	 * @return plaintext (bytes)
+	 * @throws Exception
+	 */
+	public static byte[] decrypt(byte[] src) throws Exception {
+		return decrypt(src, KEY.getBytes());
+	}
+
+	/**
+	 * Encrypt with the default key.
+	 * @param src plaintext (string)
+	 * @return ciphertext (hex string), or null if encryption fails
+	 */
+	public final static String encrypt(String src) {
+		try {
+			return byte2hex(encrypt(src.getBytes(), KEY.getBytes()));
+		} catch (Exception e) {
+			LOGGER.warn(e.getMessage(), e);
+		}
+		return null;
+	}
+	
+	/**
+	 * Encrypt.
+	 * @param src plaintext (string)
+	 * @param encryptKey key used for encryption
+	 * @return ciphertext (hex string), or null if encryption fails
+	 */
+	public final static String encrypt(String src, String encryptKey) {
+		try {
+			return byte2hex(encrypt(src.getBytes(), encryptKey.getBytes()));
+		} catch (Exception e) {
+			LOGGER.warn(e.getMessage(), e);
+		}
+		return null;
+	}
+
+	/**
+	 * Decrypt with the default key.
+	 * @param src ciphertext (hex string)
+	 * @return plaintext (string), or null if decryption fails
+	 */
+	public final static String decrypt(String src) {
+		try {
+			return new String(decrypt(hex2byte(src.getBytes()), KEY.getBytes()));
+		} catch (Exception e) {
+			LOGGER.warn(e.getMessage(), e);
+		}
+		return null;
+	}
+	
+	/**
+	 * Decrypt.
+	 * @param src ciphertext (hex string)
+	 * @param decryptKey key used for decryption
+	 * @return plaintext (string), or null if decryption fails
+	 */
+	public final static String decrypt(String src, String decryptKey) {
+		try {
+			return new String(decrypt(hex2byte(src.getBytes()), decryptKey.getBytes()));
+		} catch (Exception e) {
+			LOGGER.warn(e.getMessage(), e);
+		}
+		return null;
+	}
+
+	/**
+	 * Encrypt with the default key.
+	 * @param src plaintext (bytes)
+	 * @return ciphertext (hex string)
+	 * @throws Exception
+	 */
+	public static String encryptToString(byte[] src) throws Exception {
+		return encrypt(new String(src));
+	}
+
+	/**
+	 * Decrypt with the default key.
+	 * @param src ciphertext (bytes)
+	 * @return plaintext (string)
+	 * @throws Exception
+	 */
+	public static String decryptToString(byte[] src) throws Exception {
+		return decrypt(new String(src));
+	}
+
+	public static String byte2hex(byte[] b) {
+		String hs = "";
+		String stmp = "";
+		for (int n = 0; n < b.length; n++) {
+			stmp = (Integer.toHexString(b[n] & 0XFF));
+			if (stmp.length() == 1)
+				hs = hs + "0" + stmp;
+			else
+				hs = hs + stmp;
+		}
+		return hs.toUpperCase();
+	}
+
+	public static byte[] hex2byte(byte[] b) {
+		if ((b.length % 2) != 0)
+			throw new IllegalArgumentException("The length is not an even number");
+		byte[] b2 = new byte[b.length / 2];
+		for (int n = 0; n < b.length; n += 2) {
+			String item = new String(b, n, 2);
+			b2[n / 2] = (byte) Integer.parseInt(item, 16);
+		}
+		return b2;
+	}
+}
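
A hedged round-trip sketch for DESCipher (illustrative only; "datax-01" is a placeholder 8-byte key, not a real credential):

    String cipherHex = DESCipher.encrypt("my-plain-text", "datax-01");
    String plain     = DESCipher.decrypt(cipherHex, "datax-01");   // "my-plain-text"
    // Note: the KEY constant above is an empty string, so the single-argument
    // encrypt(String)/decrypt(String) overloads appear to log a warning and return null
    // until a real key is configured (DESKeySpec requires at least 8 bytes of key material).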

+ 33 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/DataXCaseEnvUtil.java

@@ -0,0 +1,33 @@
+package cn.tr.plugin.dataX.common.util;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class DataXCaseEnvUtil {
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(DataXCaseEnvUtil.class);
+
+    // Environment switches to speed up DataX regression tests.
+    private static String DATAX_AUTOTEST_RETRY_TIME = System.getenv("DATAX_AUTOTEST_RETRY_TIME");
+    private static String DATAX_AUTOTEST_RETRY_INTERVAL = System.getenv("DATAX_AUTOTEST_RETRY_INTERVAL");
+    private static String DATAX_AUTOTEST_RETRY_EXPONENTIAL = System.getenv("DATAX_AUTOTEST_RETRY_EXPONENTIAL");
+
+    public static int getRetryTimes(int retryTimes) {
+        int actualRetryTimes = DATAX_AUTOTEST_RETRY_TIME != null ? Integer.valueOf(DATAX_AUTOTEST_RETRY_TIME) : retryTimes;
+       // LOGGER.info("The actualRetryTimes is {}", actualRetryTimes);
+        return actualRetryTimes;
+    }
+
+    public static long getRetryInterval(long retryInterval) {
+        long actualRetryInterval = DATAX_AUTOTEST_RETRY_INTERVAL != null ? Long.valueOf(DATAX_AUTOTEST_RETRY_INTERVAL) : retryInterval;
+       // LOGGER.info("The actualRetryInterval is {}", actualRetryInterval);
+        return actualRetryInterval;
+    }
+
+    public static boolean getRetryExponential(boolean retryExponential) {
+        boolean actualRetryExponential = DATAX_AUTOTEST_RETRY_EXPONENTIAL != null ? Boolean.valueOf(DATAX_AUTOTEST_RETRY_EXPONENTIAL) : retryExponential;
+       // LOGGER.info("The actualRetryExponential is {}", actualRetryExponential);
+        return actualRetryExponential;
+    }
+}
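
A usage sketch of the environment overrides (the default values shown are illustrative):

    // With e.g. DATAX_AUTOTEST_RETRY_TIME=1 exported, the configured retry count is overridden;
    // otherwise the caller-supplied default passes straight through.
    int retries     = DataXCaseEnvUtil.getRetryTimes(3);
    long intervalMs = DataXCaseEnvUtil.getRetryInterval(1000L);
    boolean backoff = DataXCaseEnvUtil.getRetryExponential(true);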

+ 53 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/FilterUtil.java

@@ -0,0 +1,53 @@
+package cn.tr.plugin.dataX.common.util;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Pattern;
+
+/**
+ * General-purpose helper for filtering a List<String> by a regular expression (the result is
+ * already de-duplicated). Typical use cases: partition filtering in odpsreader, path filtering in
+ * hdfsreader/txtfilereader, and so on.
+ */
+public final class FilterUtil {
+
+    // The result is de-duplicated.
+    public static List<String> filterByRegular(List<String> allStrs,
+                                               String regular) {
+        List<String> matchedValues = new ArrayList<String>();
+
+        // Compatibility with glob-style syntax habits (pt=* should really be the regex pt=.*).
+        String newRegular = regular.replace(".*", "*").replace("*", ".*");
+
+        Pattern p = Pattern.compile(newRegular);
+
+        for (String partition : allStrs) {
+            if (p.matcher(partition).matches()) {
+                if (!matchedValues.contains(partition)) {
+                    matchedValues.add(partition);
+                }
+            }
+        }
+
+        return matchedValues;
+    }
+
+    // The result is de-duplicated.
+    public static List<String> filterByRegulars(List<String> allStrs,
+                                                List<String> regulars) {
+        List<String> matchedValues = new ArrayList<String>();
+
+        List<String> tempMatched = null;
+        for (String regular : regulars) {
+            tempMatched = filterByRegular(allStrs, regular);
+            if (null != tempMatched && !tempMatched.isEmpty()) {
+                for (String temp : tempMatched) {
+                    if (!matchedValues.contains(temp)) {
+                        matchedValues.add(temp);
+                    }
+                }
+            }
+        }
+
+        return matchedValues;
+    }
+}
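
A sketch of the glob-to-regex normalization described above (the partition names are made up; assumes the usual java.util imports):

    List<String> partitions = Arrays.asList("pt=20240101", "pt=20240102", "dt=20240101");
    // "pt=*" is normalized to the regex "pt=.*" before matching.
    List<String> matched = FilterUtil.filterByRegular(partitions, "pt=*");
    // -> [pt=20240101, pt=20240102], de-duplicated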

+ 61 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/IdAndKeyRollingUtil.java

@@ -0,0 +1,61 @@
+package cn.tr.plugin.dataX.common.util;
+
+import cn.hutool.core.util.StrUtil;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+
+public class IdAndKeyRollingUtil {
+	private static Logger LOGGER = LoggerFactory.getLogger(IdAndKeyRollingUtil.class);
+	public static final String SKYNET_ACCESSID = "SKYNET_ACCESSID";
+	public static final String SKYNET_ACCESSKEY = "SKYNET_ACCESSKEY";
+
+	public final static String ACCESS_ID = "accessId";
+	public final static String ACCESS_KEY = "accessKey";
+
+	public static String parseAkFromSkynetAccessKey() {
+		Map<String, String> envProp = System.getenv();
+		String skynetAccessID = envProp.get(IdAndKeyRollingUtil.SKYNET_ACCESSID);
+		String skynetAccessKey = envProp.get(IdAndKeyRollingUtil.SKYNET_ACCESSKEY);
+		String accessKey = null;
+		// Follow the original condition:
+		// if either SKYNET_ACCESSID or SKYNET_ACCESSKEY is present in the environment, both are assumed to be present.
+		// if (StringUtils.isNotBlank(skynetAccessID) ||
+		// StringUtils.isNotBlank(skynetAccessKey)) {
+		// Stricter check: only proceed when the encrypted string is non-blank; strings that worked before should never be blank anyway.
+		if (StrUtil.isNotBlank(skynetAccessKey)) {
+			LOGGER.info("Try to get accessId/accessKey from environment SKYNET_ACCESSKEY.");
+			accessKey = DESCipher.decrypt(skynetAccessKey);
+			if (StrUtil.isBlank(accessKey)) {
+				// The environment variable exists but could not be decrypted.
+				throw DataXException.asDataXException(String.format(
+						"Failed to get the [accessId]/[accessKey] from the environment variable. The [accessId]=[%s]",
+						skynetAccessID));
+			}
+		}
+		if (StrUtil.isNotBlank(accessKey)) {
+			LOGGER.info("Get accessId/accessKey from environment variables SKYNET_ACCESSKEY successfully.");
+		}
+		return accessKey;
+	}
+
+	public static String getAccessIdAndKeyFromEnv(Configuration originalConfig) {
+		String accessId = null;
+		Map<String, String> envProp = System.getenv();
+		accessId = envProp.get(IdAndKeyRollingUtil.SKYNET_ACCESSID);
+		String accessKey = null;
+		if (StrUtil.isBlank(accessKey)) {
+			// The old behavior did not throw; it simply failed to obtain the AK.
+			accessKey = IdAndKeyRollingUtil.parseAkFromSkynetAccessKey();
+		}
+
+		if (StrUtil.isNotBlank(accessKey)) {
+			// Callers of this utility all follow the accessId/accessKey naming convention.
+			originalConfig.set(IdAndKeyRollingUtil.ACCESS_ID, accessId);
+			originalConfig.set(IdAndKeyRollingUtil.ACCESS_KEY, accessKey);
+		}
+		return accessKey;
+	}
+}
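
A sketch of the intended call pattern (illustrative only; assumes the SKYNET_* variables are present and DESCipher has a usable key):

    Configuration original = Configuration.from("{}");
    String accessKey = IdAndKeyRollingUtil.getAccessIdAndKeyFromEnv(original);
    if (StrUtil.isNotBlank(accessKey)) {
        // Populated from SKYNET_ACCESSID and the DES-decrypted SKYNET_ACCESSKEY.
        original.getString(IdAndKeyRollingUtil.ACCESS_ID);
        original.getString(IdAndKeyRollingUtil.ACCESS_KEY);
    }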

+ 161 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/ListUtil.java

@@ -0,0 +1,161 @@
+package cn.tr.plugin.dataX.common.util;
+
+import cn.hutool.core.util.ArrayUtil;
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+
+/**
+ * Common helpers for the Lists used inside DataX. For example, checkIfValueDuplicate can verify
+ * that the writer columns configured by the user are not duplicated; makeSureNoValueDuplicate does
+ * the same but fails hard with an exception.
+ */
+public final class ListUtil {
+
+    public static boolean checkIfValueDuplicate(List<String> aList,
+                                                boolean caseSensitive) {
+        if (null == aList || aList.isEmpty()) {
+            throw DataXException.asDataXException(TRExcCode.CONFIG_ERROR,
+                    "您提供的作业配置有误,List不能为空.");
+        }
+
+        try {
+            makeSureNoValueDuplicate(aList, caseSensitive);
+        } catch (Exception e) {
+            return true;
+        }
+        return false;
+    }
+
+    public static void makeSureNoValueDuplicate(List<String> aList,
+                                                boolean caseSensitive) {
+        if (null == aList || aList.isEmpty()) {
+            throw new IllegalArgumentException("您提供的作业配置有误, List不能为空.");
+        }
+
+        if (1 == aList.size()) {
+            return;
+        } else {
+            List<String> list = null;
+            if (!caseSensitive) {
+                list = valueToLowerCase(aList);
+            } else {
+                list = new ArrayList<String>(aList);
+            }
+
+            Collections.sort(list);
+
+            for (int i = 0, len = list.size() - 1; i < len; i++) {
+                if (list.get(i).equals(list.get(i + 1))) {
+                    throw DataXException
+                            .asDataXException(
+                                    TRExcCode.CONFIG_ERROR,
+                                    String.format(
+                                            "您提供的作业配置信息有误, String:[%s] 不允许重复出现在列表中: [%s].",
+                                            list.get(i),
+                                            ArrayUtil.join(aList, ",")));
+                }
+            }
+        }
+    }
+
+    public static boolean checkIfBInA(List<String> aList, List<String> bList,
+                                      boolean caseSensitive) {
+        if (null == aList || aList.isEmpty() || null == bList
+                || bList.isEmpty()) {
+            throw new IllegalArgumentException("您提供的作业配置有误, List不能为空.");
+        }
+
+        try {
+            makeSureBInA(aList, bList, caseSensitive);
+        } catch (Exception e) {
+            return false;
+        }
+        return true;
+    }
+
+    public static void makeSureBInA(List<String> aList, List<String> bList,
+                                    boolean caseSensitive) {
+        if (null == aList || aList.isEmpty() || null == bList
+                || bList.isEmpty()) {
+            throw new IllegalArgumentException("您提供的作业配置有误, List不能为空.");
+        }
+
+        List<String> all = null;
+        List<String> part = null;
+
+        if (!caseSensitive) {
+            all = valueToLowerCase(aList);
+            part = valueToLowerCase(bList);
+        } else {
+            all = new ArrayList<String>(aList);
+            part = new ArrayList<String>(bList);
+        }
+
+        for (String oneValue : part) {
+            if (!all.contains(oneValue)) {
+                throw DataXException
+                        .asDataXException(
+                                TRExcCode.CONFIG_ERROR,
+                                String.format(
+                                        "您提供的作业配置信息有误, String:[%s] 不存在于列表中:[%s].",
+                                        oneValue, ArrayUtil.join(aList, ",")));
+            }
+        }
+
+    }
+
+    public static boolean checkIfValueSame(List<Boolean> aList) {
+        if (null == aList || aList.isEmpty()) {
+            throw new IllegalArgumentException("您提供的作业配置有误, List不能为空.");
+        }
+
+        if (1 == aList.size()) {
+            return true;
+        } else {
+            Boolean firstValue = aList.get(0);
+            for (int i = 1, len = aList.size(); i < len; i++) {
+                if (firstValue.booleanValue() != aList.get(i).booleanValue()) {
+                    return false;
+                }
+            }
+            return true;
+        }
+    }
+
+    public static List<String> valueToLowerCase(List<String> aList) {
+        if (null == aList || aList.isEmpty()) {
+            throw new IllegalArgumentException("您提供的作业配置有误, List不能为空.");
+        }
+        List<String> result = new ArrayList<String>(aList.size());
+        for (String oneValue : aList) {
+            result.add(null != oneValue ? oneValue.toLowerCase() : null);
+        }
+
+        return result;
+    }
+    
+    public static Boolean checkIfHasSameValue(List<String> listA, List<String> listB) {
+        if (null == listA || listA.isEmpty() || null == listB || listB.isEmpty()) {
+            return false;
+        }
+
+        for (String oneValue : listA) {
+            if (listB.contains(oneValue)) {
+                return true;
+            }
+        }
+
+        return false;
+    }
+    
+    public static boolean checkIfAllSameValue(List<String> listA, List<String> listB) {
+        if (null == listA || listA.isEmpty() || null == listB || listB.isEmpty() || listA.size() != listB.size()) {
+            return false;
+        }
+        return new HashSet<>(listA).containsAll(new HashSet<>(listB));
+    }
+}
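
A sketch of the duplicate/containment checks (the column names are illustrative; assumes the usual java.util imports):

    List<String> columns = Arrays.asList("id", "name", "ID");
    ListUtil.checkIfValueDuplicate(columns, false);  // true  – "id" and "ID" collide case-insensitively
    ListUtil.checkIfValueDuplicate(columns, true);   // false – all values are distinct case-sensitively
    ListUtil.makeSureBInA(columns, Arrays.asList("name"), true);  // passes; would throw DataXException otherwise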

+ 54 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/LocalStrings.properties

@@ -0,0 +1,54 @@
+very_like_yixiao=\u4e00{0}\u4e8c{1}\u4e09
+
+
+configuration.1=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef\uff0c\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6[{0}]\u4e0d\u5b58\u5728. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6.
+configuration.2=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6[{0}]\u8bfb\u53d6\u5931\u8d25\uff0c\u9519\u8bef\u539f\u56e0: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6\u7684\u6743\u9650\u8bbe\u7f6e.
+configuration.3=\u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6. \u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u8bfb\u53d6\u5931\u8d25\uff0c\u9519\u8bef\u539f\u56e0: {0}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6\u7684\u6743\u9650\u8bbe\u7f6e.
+configuration.4=\u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6\u6709\u8bef\uff0c[{0}]\u662f\u5fc5\u586b\u53c2\u6570\uff0c\u4e0d\u5141\u8bb8\u4e3a\u7a7a\u6216\u8005\u7559\u767d .
+configuration.5=\u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6\u6709\u8bef\uff0c[{0}]\u662f\u5fc5\u586b\u53c2\u6570\uff0c\u4e0d\u5141\u8bb8\u4e3a\u7a7a\u6216\u8005\u7559\u767d .
+configuration.6=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u56e0\u4e3a\u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5\uff0c\u671f\u671b\u662f\u5b57\u7b26\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.7=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u6709\u8bef\uff0c\u56e0\u4e3a\u4ece[{0}]\u83b7\u53d6\u7684\u503c[{1}]\u65e0\u6cd5\u8f6c\u6362\u4e3abool\u7c7b\u578b. \u8bf7\u68c0\u67e5\u6e90\u8868\u7684\u914d\u7f6e\u5e76\u4e14\u505a\u51fa\u76f8\u5e94\u7684\u4fee\u6539.
+configuration.8=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6574\u6570\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.9=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6574\u6570\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.10=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6d6e\u70b9\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.11=\u914d\u7f6e\u6587\u4ef6\u5bf9\u5e94Key[{0}]\u5e76\u4e0d\u5b58\u5728\uff0c\u8be5\u60c5\u51b5\u662f\u4ee3\u7801\u7f16\u7a0b\u9519\u8bef. \u8bf7\u8054\u7cfbDataX\u56e2\u961f\u7684\u540c\u5b66.
+configuration.12=\u503c[{0}]\u65e0\u6cd5\u9002\u914d\u60a8\u63d0\u4f9b[{1}]\uff0c \u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f!
+configuration.13=Path\u4e0d\u80fd\u4e3anull\uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.14=\u8def\u5f84[{0}]\u51fa\u73b0\u975e\u6cd5\u503c\u7c7b\u578b[{1}]\uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f! .
+configuration.15=\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.16=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u9700\u8981\u914d\u7f6eJson\u683c\u5f0f\u7684Map\u5bf9\u8c61\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{1}]. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.17=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u503c\u4e3anull\uff0cdatax\u65e0\u6cd5\u8bc6\u522b\u8be5\u914d\u7f6e. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.18=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u9700\u8981\u914d\u7f6eJson\u683c\u5f0f\u7684Map\u5bf9\u8c61\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{1}]. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.19=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef\uff0c\u5217\u8868\u4e0b\u6807\u5fc5\u987b\u4e3a\u6570\u5b57\u7c7b\u578b\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{0}] \uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.20=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f!.
+configuration.21=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8def\u5f84[{0}]\u4e0d\u5408\u6cd5, \u8def\u5f84\u5c42\u6b21\u4e4b\u95f4\u4e0d\u80fd\u51fa\u73b0\u7a7a\u767d\u5b57\u7b26 .
+configuration.22=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u56e0\u4e3a\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u4e0d\u662f\u5408\u6cd5\u7684JSON\u683c\u5f0f, JSON\u4e0d\u80fd\u4e3a\u7a7a\u767d. \u8bf7\u6309\u7167\u6807\u51c6json\u683c\u5f0f\u63d0\u4f9b\u914d\u7f6e\u4fe1\u606f.
+configuration.23=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u4e0d\u662f\u5408\u6cd5\u7684JSON\u683c\u5f0f: {0} . \u8bf7\u6309\u7167\u6807\u51c6json\u683c\u5f0f\u63d0\u4f9b\u914d\u7f6e\u4fe1\u606f. 
+
+
+listutil.1=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef\uff0cList\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.2=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.3=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u4fe1\u606f\u6709\u8bef, String:[{0}] \u4e0d\u5141\u8bb8\u91cd\u590d\u51fa\u73b0\u5728\u5217\u8868\u4e2d: [{1}].
+listutil.4=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.5=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.6=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u4fe1\u606f\u6709\u8bef, String:[{0}] \u4e0d\u5b58\u5728\u4e8e\u5217\u8868\u4e2d:[{1}].
+listutil.7=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.8=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+
+
+rangesplitutil.1=\u5207\u5206\u4efd\u6570\u4e0d\u80fd\u5c0f\u4e8e1. \u6b64\u5904:expectSliceNumber=[{0}].
+rangesplitutil.2=\u5bf9 BigInteger \u8fdb\u884c\u5207\u5206\u65f6\uff0c\u5176\u5de6\u53f3\u533a\u95f4\u4e0d\u80fd\u4e3a null. \u6b64\u5904:left=[{0}],right=[{1}].
+rangesplitutil.3=\u53c2\u6570 bigInteger \u4e0d\u80fd\u4e3a\u7a7a.
+rangesplitutil.4=\u6839\u636e\u5b57\u7b26\u4e32\u8fdb\u884c\u5207\u5206\u65f6\u4ec5\u652f\u6301 ASCII \u5b57\u7b26\u4e32\uff0c\u800c\u5b57\u7b26\u4e32:[{0}]\u975e ASCII \u5b57\u7b26\u4e32.
+rangesplitutil.5=\u53c2\u6570 bigInteger \u4e0d\u80fd\u4e3a\u7a7a.
+rangesplitutil.6=\u6839\u636e\u5b57\u7b26\u4e32\u8fdb\u884c\u5207\u5206\u65f6\u4ec5\u652f\u6301 ASCII \u5b57\u7b26\u4e32\uff0c\u800c\u5b57\u7b26\u4e32:[{0}]\u975e ASCII \u5b57\u7b26\u4e32.
+
+
+retryutil.1=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u5165\u53c2callable\u4e0d\u80fd\u4e3a\u7a7a !
+retryutil.2=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u5165\u53c2retrytime[%d]\u4e0d\u80fd\u5c0f\u4e8e1 !
+retryutil.3=Exception when calling callable, \u5f02\u5e38Msg:{0}
+retryutil.4=Exception when calling callable, \u5373\u5c06\u5c1d\u8bd5\u6267\u884c\u7b2c{0}\u6b21\u91cd\u8bd5,\u5171\u8ba1\u91cd\u8bd5{1}\u6b21.\u672c\u6b21\u91cd\u8bd5\u8ba1\u5212\u7b49\u5f85[{2}]ms,\u5b9e\u9645\u7b49\u5f85[{3}]ms, \u5f02\u5e38Msg:[{4}]
+
+
+httpclientutil.1=\u8BF7\u6C42\u5730\u5740\uFF1A{0}, \u8BF7\u6C42\u65B9\u6CD5\uFF1A{1}, STATUS CODE = {2}, Response Entity: {3} 
+httpclientutil.2=\u8FDC\u7A0B\u63A5\u53E3\u8FD4\u56DE-1,\u5C06\u91CD\u8BD5

+ 53 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/LocalStrings_en_US.properties

@@ -0,0 +1,53 @@
+very_like_yixiao=1{0}2{1}3
+
+
+configuration.1=Configuration information error. The configuration file [{0}] you provided does not exist. Please check your configuration files. 
+configuration.2=Configuration information error. Failed to read the configuration file [{0}] you provided. Error reason: {1}. Please check the permission settings of your configuration files. 
+configuration.3=Please check your configuration files. Failed to read the configuration file you provided. Error reason: {0}. Please check the permission settings of your configuration files.
+configuration.4=The configuration file you provided contains errors. [{0}] is a required parameter and cannot be empty or blank. 
+configuration.5=The configuration file you provided contains errors. [{0}] is a required parameter and cannot be empty or blank.
+configuration.6=Task reading configuration file error. Invalid configuration file path [{0}] value. The expected value should be of the character type: {1}. Please check your configuration and make corrections.
+configuration.7=The configuration information you provided contains errors. The value [{1}] obtained from [{0}] cannot be converted to the Bool type. Please check the source table configuration and make corrections. 
+configuration.8=Task reading configuration file error. Invalid configuration file path [{0}] value. The expected value should be of the integer type: {1}. Please check your configuration and make corrections.
+configuration.9=Task reading configuration file error. Invalid configuration file path [{0}] value. The expected value should be of the integer type: {1}. Please check your configuration and make corrections.
+configuration.10=Task reading configuration file error. Invalid configuration file path [{0}] value. The expected value should be of the floating-point type: {1}. Please check your configuration and make corrections.
+configuration.11=The Key [{0}] for the configuration file does not exist. This is a code programming error. Please contact the DataX team. 
+configuration.12=The value [{0}] cannot adapt to the [{1}] you provided. This exception represents a system programming error. Please contact the DataX developer team. 
+configuration.13=The path cannot be null. This exception represents a system programming error. Please contact the DataX developer team. 
+configuration.14=The path [{0}] has an invalid value type [{1}]. This exception represents a system programming error. Please contact the DataX developer team. 
+configuration.15=This exception represents a system programming error. Please contact the DataX developer team. 
+configuration.16=The configuration file you provided contains errors. The path [{0}] requires you to configure a Map object in JSON format, but the actual type found on the node is [{1}]. Please check your configuration and make corrections. 
+configuration.17=The configuration file you provided contains errors. The value of the path [{0}] is null and DataX cannot recognize the configuration. Please check your configuration and make corrections. 
+configuration.18=The configuration file you provided contains errors. The path [{0}] requires you to configure a Map object in JSON format, but the actual type found on the node is [{1}]. Please check your configuration and make corrections.
+configuration.19=System programming error. The list subscript must be of the numeric type, but the actual type found on this node is [{0}]. This exception represents a system programming error. Please contact the DataX developer team. 
+configuration.20=System programming error. This exception represents a system programming error. Please contact the DataX developer team. 
+configuration.21=System programming error. Invalid path [{0}]. No spaces are allowed between path layers. 
+configuration.22=Configuration information error. The configuration information you provided is not in a legal JSON format. JSON cannot be blank. Please provide the configuration information in the standard JSON format. 
+configuration.23=Configuration information error. The configuration information you provided is not in a valid JSON format: {0}. Please provide the configuration information in the standard JSON format.  
+
+
+listutil.1=The job configuration you provided contains errors. The list cannot be empty. 
+listutil.2=The job configuration you provided contains errors. The list cannot be empty.
+listutil.3=The job configuration information you provided contains errors. String: [{0}] is not allowed to be repeated in the list: [{1}]. 
+listutil.4=The job configuration you provided contains errors. The list cannot be empty.
+listutil.5=The job configuration you provided contains errors. The list cannot be empty.
+listutil.6=The job configuration information you provided contains errors. String: [{0}] does not exist in the list: [{1}]. 
+listutil.7=The job configuration you provided contains errors. The list cannot be empty.
+listutil.8=The job configuration you provided contains errors. The list cannot be empty.
+
+
+rangesplitutil.1=The slice number cannot be less than 1. Here: [expectSliceNumber]=[{0}].
+rangesplitutil.2=The left or right intervals of BigInteger character strings cannot be null when they are sliced. Here: [left]=[{0}], [right]=[{1}]. 
+rangesplitutil.3=The [bigInteger] parameter cannot be null.
+rangesplitutil.4=Only ASCII character strings are supported for character string slicing, but the [{0}] character string is not of the ASCII type. 
+rangesplitutil.5=The [bigInteger] parameter cannot be null.
+rangesplitutil.6=Only ASCII character strings are supported for character string slicing, but the [{0}] character string is not of the ASCII type.
+
+
+retryutil.1=System programming error. The “callable” input parameter cannot be null. 
+retryutil.2=System programming error. The “retrytime[%d]” input parameter cannot be less than 1. 
+retryutil.3=Exception when calling callable. Exception Msg: {0}
+retryutil.4=Exception when calling callable. Retry Attempt: {0} will start soon. {1} attempts in total. This attempt planned to wait for [{2}]ms, and actually waited for [{3}]ms. Exception Msg: [{4}].
+
+httpclientutil.1=Request address: {0}. Request method: {1}. STATUS CODE = {2}, Response Entity: {3}
+httpclientutil.2=The remote interface returns -1. We will try again

+ 53 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/LocalStrings_ja_JP.properties

@@ -0,0 +1,53 @@
+very_like_yixiao=1{0}2{1}3
+
+
+configuration.1=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef\uff0c\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6[{0}]\u4e0d\u5b58\u5728. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6.
+configuration.2=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6[{0}]\u8bfb\u53d6\u5931\u8d25\uff0c\u9519\u8bef\u539f\u56e0: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6\u7684\u6743\u9650\u8bbe\u7f6e.
+configuration.3=\u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6. \u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u8bfb\u53d6\u5931\u8d25\uff0c\u9519\u8bef\u539f\u56e0: {0}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6\u7684\u6743\u9650\u8bbe\u7f6e.
+configuration.4=\u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6\u6709\u8bef\uff0c[{0}]\u662f\u5fc5\u586b\u53c2\u6570\uff0c\u4e0d\u5141\u8bb8\u4e3a\u7a7a\u6216\u8005\u7559\u767d .
+configuration.5=\u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6\u6709\u8bef\uff0c[{0}]\u662f\u5fc5\u586b\u53c2\u6570\uff0c\u4e0d\u5141\u8bb8\u4e3a\u7a7a\u6216\u8005\u7559\u767d .
+configuration.6=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u56e0\u4e3a\u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5\uff0c\u671f\u671b\u662f\u5b57\u7b26\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.7=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u6709\u8bef\uff0c\u56e0\u4e3a\u4ece[{0}]\u83b7\u53d6\u7684\u503c[{1}]\u65e0\u6cd5\u8f6c\u6362\u4e3abool\u7c7b\u578b. \u8bf7\u68c0\u67e5\u6e90\u8868\u7684\u914d\u7f6e\u5e76\u4e14\u505a\u51fa\u76f8\u5e94\u7684\u4fee\u6539.
+configuration.8=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6574\u6570\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.9=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6574\u6570\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.10=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6d6e\u70b9\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.11=\u914d\u7f6e\u6587\u4ef6\u5bf9\u5e94Key[{0}]\u5e76\u4e0d\u5b58\u5728\uff0c\u8be5\u60c5\u51b5\u662f\u4ee3\u7801\u7f16\u7a0b\u9519\u8bef. \u8bf7\u8054\u7cfbDataX\u56e2\u961f\u7684\u540c\u5b66.
+configuration.12=\u503c[{0}]\u65e0\u6cd5\u9002\u914d\u60a8\u63d0\u4f9b[{1}]\uff0c \u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f!
+configuration.13=Path\u4e0d\u80fd\u4e3anull\uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.14=\u8def\u5f84[{0}]\u51fa\u73b0\u975e\u6cd5\u503c\u7c7b\u578b[{1}]\uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f! .
+configuration.15=\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.16=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u9700\u8981\u914d\u7f6eJson\u683c\u5f0f\u7684Map\u5bf9\u8c61\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{1}]. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.17=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u503c\u4e3anull\uff0cdatax\u65e0\u6cd5\u8bc6\u522b\u8be5\u914d\u7f6e. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.18=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u9700\u8981\u914d\u7f6eJson\u683c\u5f0f\u7684Map\u5bf9\u8c61\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{1}]. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.19=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef\uff0c\u5217\u8868\u4e0b\u6807\u5fc5\u987b\u4e3a\u6570\u5b57\u7c7b\u578b\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{0}] \uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.20=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f!.
+configuration.21=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8def\u5f84[{0}]\u4e0d\u5408\u6cd5, \u8def\u5f84\u5c42\u6b21\u4e4b\u95f4\u4e0d\u80fd\u51fa\u73b0\u7a7a\u767d\u5b57\u7b26 .
+configuration.22=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u56e0\u4e3a\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u4e0d\u662f\u5408\u6cd5\u7684JSON\u683c\u5f0f, JSON\u4e0d\u80fd\u4e3a\u7a7a\u767d. \u8bf7\u6309\u7167\u6807\u51c6json\u683c\u5f0f\u63d0\u4f9b\u914d\u7f6e\u4fe1\u606f.
+configuration.23=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u4e0d\u662f\u5408\u6cd5\u7684JSON\u683c\u5f0f: {0} . \u8bf7\u6309\u7167\u6807\u51c6json\u683c\u5f0f\u63d0\u4f9b\u914d\u7f6e\u4fe1\u606f. 
+
+
+listutil.1=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef\uff0cList\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.2=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.3=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u4fe1\u606f\u6709\u8bef, String:[{0}] \u4e0d\u5141\u8bb8\u91cd\u590d\u51fa\u73b0\u5728\u5217\u8868\u4e2d: [{1}].
+listutil.4=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.5=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.6=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u4fe1\u606f\u6709\u8bef, String:[{0}] \u4e0d\u5b58\u5728\u4e8e\u5217\u8868\u4e2d:[{1}].
+listutil.7=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.8=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+
+
+rangesplitutil.1=\u5207\u5206\u4efd\u6570\u4e0d\u80fd\u5c0f\u4e8e1. \u6b64\u5904:expectSliceNumber=[{0}].
+rangesplitutil.2=\u5bf9 BigInteger \u8fdb\u884c\u5207\u5206\u65f6\uff0c\u5176\u5de6\u53f3\u533a\u95f4\u4e0d\u80fd\u4e3a null. \u6b64\u5904:left=[{0}],right=[{1}].
+rangesplitutil.3=\u53c2\u6570 bigInteger \u4e0d\u80fd\u4e3a\u7a7a.
+rangesplitutil.4=\u6839\u636e\u5b57\u7b26\u4e32\u8fdb\u884c\u5207\u5206\u65f6\u4ec5\u652f\u6301 ASCII \u5b57\u7b26\u4e32\uff0c\u800c\u5b57\u7b26\u4e32:[{0}]\u975e ASCII \u5b57\u7b26\u4e32.
+rangesplitutil.5=\u53c2\u6570 bigInteger \u4e0d\u80fd\u4e3a\u7a7a.
+rangesplitutil.6=\u6839\u636e\u5b57\u7b26\u4e32\u8fdb\u884c\u5207\u5206\u65f6\u4ec5\u652f\u6301 ASCII \u5b57\u7b26\u4e32\uff0c\u800c\u5b57\u7b26\u4e32:[{0}]\u975e ASCII \u5b57\u7b26\u4e32.
+
+
+retryutil.1=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u5165\u53c2callable\u4e0d\u80fd\u4e3a\u7a7a !
+retryutil.2=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u5165\u53c2retrytime[%d]\u4e0d\u80fd\u5c0f\u4e8e1 !
+retryutil.3=Exception when calling callable, \u5f02\u5e38Msg:{0}
+retryutil.4=Exception when calling callable, \u5373\u5c06\u5c1d\u8bd5\u6267\u884c\u7b2c{0}\u6b21\u91cd\u8bd5,\u5171\u8ba1\u91cd\u8bd5{1}\u6b21.\u672c\u6b21\u91cd\u8bd5\u8ba1\u5212\u7b49\u5f85[{2}]ms,\u5b9e\u9645\u7b49\u5f85[{3}]ms, \u5f02\u5e38Msg:[{4}]
+
+httpclientutil.1=\u8BF7\u6C42\u5730\u5740\uFF1A{0}, \u8BF7\u6C42\u65B9\u6CD5\uFF1A{1},STATUS CODE = {2}, Response Entity: {3}
+httpclientutil.2=\u8FDC\u7A0B\u63A5\u53E3\u8FD4\u56DE-1,\u5C06\u91CD\u8BD5

+ 54 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/LocalStrings_zh_CN.properties

@@ -0,0 +1,54 @@
+very_like_yixiao=\u4e00{0}\u4e8c{1}\u4e09
+
+
+configuration.1=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef\uff0c\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6[{0}]\u4e0d\u5b58\u5728. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6.
+configuration.2=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6[{0}]\u8bfb\u53d6\u5931\u8d25\uff0c\u9519\u8bef\u539f\u56e0: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6\u7684\u6743\u9650\u8bbe\u7f6e.
+configuration.3=\u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6. \u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u8bfb\u53d6\u5931\u8d25\uff0c\u9519\u8bef\u539f\u56e0: {0}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6\u7684\u6743\u9650\u8bbe\u7f6e.
+configuration.4=\u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6\u6709\u8bef\uff0c[{0}]\u662f\u5fc5\u586b\u53c2\u6570\uff0c\u4e0d\u5141\u8bb8\u4e3a\u7a7a\u6216\u8005\u7559\u767d .
+configuration.5=\u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6\u6709\u8bef\uff0c[{0}]\u662f\u5fc5\u586b\u53c2\u6570\uff0c\u4e0d\u5141\u8bb8\u4e3a\u7a7a\u6216\u8005\u7559\u767d .
+configuration.6=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u56e0\u4e3a\u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5\uff0c\u671f\u671b\u662f\u5b57\u7b26\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.7=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u6709\u8bef\uff0c\u56e0\u4e3a\u4ece[{0}]\u83b7\u53d6\u7684\u503c[{1}]\u65e0\u6cd5\u8f6c\u6362\u4e3abool\u7c7b\u578b. \u8bf7\u68c0\u67e5\u6e90\u8868\u7684\u914d\u7f6e\u5e76\u4e14\u505a\u51fa\u76f8\u5e94\u7684\u4fee\u6539.
+configuration.8=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6574\u6570\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.9=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6574\u6570\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.10=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6d6e\u70b9\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.11=\u914d\u7f6e\u6587\u4ef6\u5bf9\u5e94Key[{0}]\u5e76\u4e0d\u5b58\u5728\uff0c\u8be5\u60c5\u51b5\u662f\u4ee3\u7801\u7f16\u7a0b\u9519\u8bef. \u8bf7\u8054\u7cfbDataX\u56e2\u961f\u7684\u540c\u5b66.
+configuration.12=\u503c[{0}]\u65e0\u6cd5\u9002\u914d\u60a8\u63d0\u4f9b[{1}]\uff0c \u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f!
+configuration.13=Path\u4e0d\u80fd\u4e3anull\uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.14=\u8def\u5f84[{0}]\u51fa\u73b0\u975e\u6cd5\u503c\u7c7b\u578b[{1}]\uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f! .
+configuration.15=\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.16=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u9700\u8981\u914d\u7f6eJson\u683c\u5f0f\u7684Map\u5bf9\u8c61\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{1}]. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.17=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u503c\u4e3anull\uff0cdatax\u65e0\u6cd5\u8bc6\u522b\u8be5\u914d\u7f6e. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.18=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u9700\u8981\u914d\u7f6eJson\u683c\u5f0f\u7684Map\u5bf9\u8c61\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{1}]. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.19=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef\uff0c\u5217\u8868\u4e0b\u6807\u5fc5\u987b\u4e3a\u6570\u5b57\u7c7b\u578b\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{0}] \uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.20=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f!.
+configuration.21=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8def\u5f84[{0}]\u4e0d\u5408\u6cd5, \u8def\u5f84\u5c42\u6b21\u4e4b\u95f4\u4e0d\u80fd\u51fa\u73b0\u7a7a\u767d\u5b57\u7b26 .
+configuration.22=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u56e0\u4e3a\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u4e0d\u662f\u5408\u6cd5\u7684JSON\u683c\u5f0f, JSON\u4e0d\u80fd\u4e3a\u7a7a\u767d. \u8bf7\u6309\u7167\u6807\u51c6json\u683c\u5f0f\u63d0\u4f9b\u914d\u7f6e\u4fe1\u606f.
+configuration.23=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u4e0d\u662f\u5408\u6cd5\u7684JSON\u683c\u5f0f: {0} . \u8bf7\u6309\u7167\u6807\u51c6json\u683c\u5f0f\u63d0\u4f9b\u914d\u7f6e\u4fe1\u606f. 
+
+
+listutil.1=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef\uff0cList\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.2=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.3=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u4fe1\u606f\u6709\u8bef, String:[{0}] \u4e0d\u5141\u8bb8\u91cd\u590d\u51fa\u73b0\u5728\u5217\u8868\u4e2d: [{1}].
+listutil.4=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.5=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.6=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u4fe1\u606f\u6709\u8bef, String:[{0}] \u4e0d\u5b58\u5728\u4e8e\u5217\u8868\u4e2d:[{1}].
+listutil.7=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.8=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+
+
+rangesplitutil.1=\u5207\u5206\u4efd\u6570\u4e0d\u80fd\u5c0f\u4e8e1. \u6b64\u5904:expectSliceNumber=[{0}].
+rangesplitutil.2=\u5bf9 BigInteger \u8fdb\u884c\u5207\u5206\u65f6\uff0c\u5176\u5de6\u53f3\u533a\u95f4\u4e0d\u80fd\u4e3a null. \u6b64\u5904:left=[{0}],right=[{1}].
+rangesplitutil.3=\u53c2\u6570 bigInteger \u4e0d\u80fd\u4e3a\u7a7a.
+rangesplitutil.4=\u6839\u636e\u5b57\u7b26\u4e32\u8fdb\u884c\u5207\u5206\u65f6\u4ec5\u652f\u6301 ASCII \u5b57\u7b26\u4e32\uff0c\u800c\u5b57\u7b26\u4e32:[{0}]\u975e ASCII \u5b57\u7b26\u4e32.
+rangesplitutil.5=\u53c2\u6570 bigInteger \u4e0d\u80fd\u4e3a\u7a7a.
+rangesplitutil.6=\u6839\u636e\u5b57\u7b26\u4e32\u8fdb\u884c\u5207\u5206\u65f6\u4ec5\u652f\u6301 ASCII \u5b57\u7b26\u4e32\uff0c\u800c\u5b57\u7b26\u4e32:[{0}]\u975e ASCII \u5b57\u7b26\u4e32.
+
+
+retryutil.1=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u5165\u53c2callable\u4e0d\u80fd\u4e3a\u7a7a !
+retryutil.2=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u5165\u53c2retrytime[%d]\u4e0d\u80fd\u5c0f\u4e8e1 !
+retryutil.3=Exception when calling callable, \u5f02\u5e38Msg:{0}
+retryutil.4=Exception when calling callable, \u5373\u5c06\u5c1d\u8bd5\u6267\u884c\u7b2c{0}\u6b21\u91cd\u8bd5,\u5171\u8ba1\u91cd\u8bd5{1}\u6b21.\u672c\u6b21\u91cd\u8bd5\u8ba1\u5212\u7b49\u5f85[{2}]ms,\u5b9e\u9645\u7b49\u5f85[{3}]ms, \u5f02\u5e38Msg:[{4}]
+
+
+httpclientutil.1=\u8BF7\u6C42\u5730\u5740\uFF1A{0}, \u8BF7\u6C42\u65B9\u6CD5\uFF1A{1},STATUS CODE = {2}, Response Entity: {3}
+httpclientutil.2=\u8FDC\u7A0B\u63A5\u53E3\u8FD4\u56DE-1,\u5C06\u91CD\u8BD5

+ 104 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/LocalStrings_zh_HK.properties

@@ -0,0 +1,104 @@
+very_like_yixiao=\u4e00{0}\u4e8c{1}\u4e09
+
+
+configuration.1=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef\uff0c\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6[{0}]\u4e0d\u5b58\u5728. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6.
+configuration.2=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6[{0}]\u8bfb\u53d6\u5931\u8d25\uff0c\u9519\u8bef\u539f\u56e0: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6\u7684\u6743\u9650\u8bbe\u7f6e.
+configuration.3=\u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6. \u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u8bfb\u53d6\u5931\u8d25\uff0c\u9519\u8bef\u539f\u56e0: {0}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6\u7684\u6743\u9650\u8bbe\u7f6e.
+configuration.4=\u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6\u6709\u8bef\uff0c[{0}]\u662f\u5fc5\u586b\u53c2\u6570\uff0c\u4e0d\u5141\u8bb8\u4e3a\u7a7a\u6216\u8005\u7559\u767d .
+configuration.5=\u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6\u6709\u8bef\uff0c[{0}]\u662f\u5fc5\u586b\u53c2\u6570\uff0c\u4e0d\u5141\u8bb8\u4e3a\u7a7a\u6216\u8005\u7559\u767d .
+configuration.6=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u56e0\u4e3a\u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5\uff0c\u671f\u671b\u662f\u5b57\u7b26\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.7=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u6709\u8bef\uff0c\u56e0\u4e3a\u4ece[{0}]\u83b7\u53d6\u7684\u503c[{1}]\u65e0\u6cd5\u8f6c\u6362\u4e3abool\u7c7b\u578b. \u8bf7\u68c0\u67e5\u6e90\u8868\u7684\u914d\u7f6e\u5e76\u4e14\u505a\u51fa\u76f8\u5e94\u7684\u4fee\u6539.
+configuration.8=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6574\u6570\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.9=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6574\u6570\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.10=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6d6e\u70b9\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.11=\u914d\u7f6e\u6587\u4ef6\u5bf9\u5e94Key[{0}]\u5e76\u4e0d\u5b58\u5728\uff0c\u8be5\u60c5\u51b5\u662f\u4ee3\u7801\u7f16\u7a0b\u9519\u8bef. \u8bf7\u8054\u7cfbDataX\u56e2\u961f\u7684\u540c\u5b66.
+configuration.12=\u503c[{0}]\u65e0\u6cd5\u9002\u914d\u60a8\u63d0\u4f9b[{1}]\uff0c \u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f!
+configuration.13=Path\u4e0d\u80fd\u4e3anull\uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.14=\u8def\u5f84[{0}]\u51fa\u73b0\u975e\u6cd5\u503c\u7c7b\u578b[{1}]\uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f! .
+configuration.15=\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.16=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u9700\u8981\u914d\u7f6eJson\u683c\u5f0f\u7684Map\u5bf9\u8c61\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{1}]. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.17=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u503c\u4e3anull\uff0cdatax\u65e0\u6cd5\u8bc6\u522b\u8be5\u914d\u7f6e. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.18=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u9700\u8981\u914d\u7f6eJson\u683c\u5f0f\u7684Map\u5bf9\u8c61\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{1}]. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.19=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef\uff0c\u5217\u8868\u4e0b\u6807\u5fc5\u987b\u4e3a\u6570\u5b57\u7c7b\u578b\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{0}] \uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.20=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f!.
+configuration.21=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8def\u5f84[{0}]\u4e0d\u5408\u6cd5, \u8def\u5f84\u5c42\u6b21\u4e4b\u95f4\u4e0d\u80fd\u51fa\u73b0\u7a7a\u767d\u5b57\u7b26 .
+configuration.22=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u56e0\u4e3a\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u4e0d\u662f\u5408\u6cd5\u7684JSON\u683c\u5f0f, JSON\u4e0d\u80fd\u4e3a\u7a7a\u767d. \u8bf7\u6309\u7167\u6807\u51c6json\u683c\u5f0f\u63d0\u4f9b\u914d\u7f6e\u4fe1\u606f.
+configuration.23=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u4e0d\u662f\u5408\u6cd5\u7684JSON\u683c\u5f0f: {0} . \u8bf7\u6309\u7167\u6807\u51c6json\u683c\u5f0f\u63d0\u4f9b\u914d\u7f6e\u4fe1\u606f. 
+
+
+listutil.1=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef\uff0cList\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.2=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.3=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u4fe1\u606f\u6709\u8bef, String:[{0}] \u4e0d\u5141\u8bb8\u91cd\u590d\u51fa\u73b0\u5728\u5217\u8868\u4e2d: [{1}].
+listutil.4=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.5=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.6=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u4fe1\u606f\u6709\u8bef, String:[{0}] \u4e0d\u5b58\u5728\u4e8e\u5217\u8868\u4e2d:[{1}].
+listutil.7=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.8=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+
+
+rangesplitutil.1=\u5207\u5206\u4efd\u6570\u4e0d\u80fd\u5c0f\u4e8e1. \u6b64\u5904:expectSliceNumber=[{0}].
+rangesplitutil.2=\u5bf9 BigInteger \u8fdb\u884c\u5207\u5206\u65f6\uff0c\u5176\u5de6\u53f3\u533a\u95f4\u4e0d\u80fd\u4e3a null. \u6b64\u5904:left=[{0}],right=[{1}].
+rangesplitutil.3=\u53c2\u6570 bigInteger \u4e0d\u80fd\u4e3a\u7a7a.
+rangesplitutil.4=\u6839\u636e\u5b57\u7b26\u4e32\u8fdb\u884c\u5207\u5206\u65f6\u4ec5\u652f\u6301 ASCII \u5b57\u7b26\u4e32\uff0c\u800c\u5b57\u7b26\u4e32:[{0}]\u975e ASCII \u5b57\u7b26\u4e32.
+rangesplitutil.5=\u53c2\u6570 bigInteger \u4e0d\u80fd\u4e3a\u7a7a.
+rangesplitutil.6=\u6839\u636e\u5b57\u7b26\u4e32\u8fdb\u884c\u5207\u5206\u65f6\u4ec5\u652f\u6301 ASCII \u5b57\u7b26\u4e32\uff0c\u800c\u5b57\u7b26\u4e32:[{0}]\u975e ASCII \u5b57\u7b26\u4e32.
+
+
+retryutil.1=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u5165\u53c2callable\u4e0d\u80fd\u4e3a\u7a7a !
+retryutil.2=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u5165\u53c2retrytime[%d]\u4e0d\u80fd\u5c0f\u4e8e1 !
+retryutil.3=Exception when calling callable, \u5f02\u5e38Msg:{0}
+retryutil.4=Exception when calling callable, \u5373\u5c06\u5c1d\u8bd5\u6267\u884c\u7b2c{0}\u6b21\u91cd\u8bd5,\u5171\u8ba1\u91cd\u8bd5{1}\u6b21.\u672c\u6b21\u91cd\u8bd5\u8ba1\u5212\u7b49\u5f85[{2}]ms,\u5b9e\u9645\u7b49\u5f85[{3}]ms, \u5f02\u5e38Msg:[{4}]
+
+very_like_yixiao=一{0}二{1}三
+
+
+configuration.1=配置資訊錯誤,您提供的配置檔案[{0}]不存在. 請檢查您的配置檔案.
+configuration.2=配置資訊錯誤. 您提供配置檔案[{0}]讀取失敗,錯誤原因: {1}. 請檢查您的配置檔案的權限設定.
+configuration.3=請檢查您的配置檔案. 您提供的配置檔案讀取失敗,錯誤原因: {0}. 請檢查您的配置檔案的權限設定.
+configuration.4=您提供配置檔案有誤,[{0}]是必填參數,不允許為空或者留白 .
+configuration.5=您提供配置檔案有誤,[{0}]是必填參數,不允許為空或者留白 .
+configuration.6=任務讀取配置檔案出錯. 因為配置檔案路徑[{0}] 值不合法,期望是字符類型: {1}. 請檢查您的配置並作出修改.
+configuration.7=您提供的配置資訊有誤,因為從[{0}]獲取的值[{1}]無法轉換為bool類型. 請檢查源表的配置並且做出相應的修改.
+configuration.8=任務讀取配置檔案出錯. 配置檔案路徑[{0}] 值不合法, 期望是整數類型: {1}. 請檢查您的配置並作出修改.
+configuration.9=任務讀取配置檔案出錯. 配置檔案路徑[{0}] 值不合法, 期望是整數類型: {1}. 請檢查您的配置並作出修改.
+configuration.10=任務讀取配置檔案出錯. 配置檔案路徑[{0}] 值不合法, 期望是浮點類型: {1}. 請檢查您的配置並作出修改.
+configuration.11=配置檔案對應Key[{0}]並不存在,該情況是代碼編程錯誤. 請聯絡DataX團隊的同學.
+configuration.12=值[{0}]無法適配您提供[{1}], 該異常代表系統編程錯誤, 請聯絡DataX開發團隊!
+configuration.13=Path不能為null,該異常代表系統編程錯誤, 請聯絡DataX開發團隊 !
+configuration.14=路徑[{0}]出現不合法值類型[{1}],該異常代表系統編程錯誤, 請聯絡DataX開發團隊! .
+configuration.15=該異常代表系統編程錯誤, 請聯絡DataX開發團隊 !
+configuration.16=您提供的配置檔案有誤. 路徑[{0}]需要配置Json格式的Map對象,但該節點發現實際類型是[{1}]. 請檢查您的配置並作出修改.
+configuration.17=您提供的配置檔案有誤. 路徑[{0}]值為null,datax無法識別該配置. 請檢查您的配置並作出修改.
+configuration.18=您提供的配置檔案有誤. 路徑[{0}]需要配置Json格式的Map對象,但該節點發現實際類型是[{1}]. 請檢查您的配置並作出修改.
+configuration.19=系統編程錯誤,清單下標必須為數字類型,但該節點發現實際類型是[{0}] ,該異常代表系統編程錯誤, 請聯絡DataX開發團隊 !
+configuration.20=系統編程錯誤, 該異常代表系統編程錯誤, 請聯絡DataX開發團隊!.
+configuration.21=系統編程錯誤, 路徑[{0}]不合法, 路徑層次之間不能出現空白字符 .
+configuration.22=配置資訊錯誤. 因為您提供的配置資訊不是合法的JSON格式, JSON不能為空白. 請按照標準json格式提供配置資訊.
+configuration.23=配置資訊錯誤. 您提供的配置資訊不是合法的JSON格式: {0}. 請按照標準json格式提供配置資訊. 
+
+
+listutil.1=您提供的作業配置有誤,List不能為空.
+listutil.2=您提供的作業配置有誤, List不能為空.
+listutil.3=您提供的作業配置資訊有誤, String:[{0}]不允許重複出現在清單中: [{1}].
+listutil.4=您提供的作業配置有誤, List不能為空.
+listutil.5=您提供的作業配置有誤, List不能為空.
+listutil.6=您提供的作業配置資訊有誤, String:[{0}]不存在於清單中:[{1}].
+listutil.7=您提供的作業配置有誤, List不能為空.
+listutil.8=您提供的作業配置有誤, List不能為空.
+
+
+rangesplitutil.1=切分份數不能小於1. 此處:expectSliceNumber=[{0}].
+rangesplitutil.2=對 BigInteger 進行切分時,其左右區間不能為 null. 此處:left=[{0}],right=[{1}].
+rangesplitutil.3=參數 bigInteger 不能為空.
+rangesplitutil.4=根據字符串進行切分時僅支援 ASCII 字符串,而字符串:[{0}]非 ASCII 字符串.
+rangesplitutil.5=參數 bigInteger 不能為空.
+rangesplitutil.6=根據字符串進行切分時僅支援 ASCII 字符串,而字符串:[{0}]非 ASCII 字符串.
+
+
+retryutil.1=系統編程錯誤, 入參callable不能為空 !
+retryutil.2=系統編程錯誤, 入參retrytime[%d]不能小於1 !
+retryutil.3=Exception when calling callable, 異常Msg:{0}
+retryutil.4=Exception when calling callable, 即將嘗試執行第{0}次重試,共計重試{1}次.本次重試計劃等待[{2}]ms,實際等待[{3}]ms, 異常Msg:[{4}]
+
+httpclientutil.1=\u8ACB\u6C42\u5730\u5740\uFF1A{0}, \u8ACB\u6C42\u65B9\u6CD5\uFF1A{1},STATUS CODE = {2}, Response Entity: {3}
+httpclientutil.2=\u9060\u7A0B\u63A5\u53E3\u8FD4\u56DE-1,\u5C07\u91CD\u8A66

+ 104 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/LocalStrings_zh_TW.properties

@@ -0,0 +1,104 @@
+very_like_yixiao=\u4e00{0}\u4e8c{1}\u4e09
+
+
+configuration.1=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef\uff0c\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6[{0}]\u4e0d\u5b58\u5728. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6.
+configuration.2=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6[{0}]\u8bfb\u53d6\u5931\u8d25\uff0c\u9519\u8bef\u539f\u56e0: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6\u7684\u6743\u9650\u8bbe\u7f6e.
+configuration.3=\u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6. \u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u8bfb\u53d6\u5931\u8d25\uff0c\u9519\u8bef\u539f\u56e0: {0}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u6587\u4ef6\u7684\u6743\u9650\u8bbe\u7f6e.
+configuration.4=\u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6\u6709\u8bef\uff0c[{0}]\u662f\u5fc5\u586b\u53c2\u6570\uff0c\u4e0d\u5141\u8bb8\u4e3a\u7a7a\u6216\u8005\u7559\u767d .
+configuration.5=\u60a8\u63d0\u4f9b\u914d\u7f6e\u6587\u4ef6\u6709\u8bef\uff0c[{0}]\u662f\u5fc5\u586b\u53c2\u6570\uff0c\u4e0d\u5141\u8bb8\u4e3a\u7a7a\u6216\u8005\u7559\u767d .
+configuration.6=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u56e0\u4e3a\u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5\uff0c\u671f\u671b\u662f\u5b57\u7b26\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.7=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u6709\u8bef\uff0c\u56e0\u4e3a\u4ece[{0}]\u83b7\u53d6\u7684\u503c[{1}]\u65e0\u6cd5\u8f6c\u6362\u4e3abool\u7c7b\u578b. \u8bf7\u68c0\u67e5\u6e90\u8868\u7684\u914d\u7f6e\u5e76\u4e14\u505a\u51fa\u76f8\u5e94\u7684\u4fee\u6539.
+configuration.8=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6574\u6570\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.9=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6574\u6570\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.10=\u4efb\u52a1\u8bfb\u53d6\u914d\u7f6e\u6587\u4ef6\u51fa\u9519. \u914d\u7f6e\u6587\u4ef6\u8def\u5f84[{0}] \u503c\u975e\u6cd5, \u671f\u671b\u662f\u6d6e\u70b9\u7c7b\u578b: {1}. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.11=\u914d\u7f6e\u6587\u4ef6\u5bf9\u5e94Key[{0}]\u5e76\u4e0d\u5b58\u5728\uff0c\u8be5\u60c5\u51b5\u662f\u4ee3\u7801\u7f16\u7a0b\u9519\u8bef. \u8bf7\u8054\u7cfbDataX\u56e2\u961f\u7684\u540c\u5b66.
+configuration.12=\u503c[{0}]\u65e0\u6cd5\u9002\u914d\u60a8\u63d0\u4f9b[{1}]\uff0c \u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f!
+configuration.13=Path\u4e0d\u80fd\u4e3anull\uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.14=\u8def\u5f84[{0}]\u51fa\u73b0\u975e\u6cd5\u503c\u7c7b\u578b[{1}]\uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f! .
+configuration.15=\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.16=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u9700\u8981\u914d\u7f6eJson\u683c\u5f0f\u7684Map\u5bf9\u8c61\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{1}]. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.17=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u503c\u4e3anull\uff0cdatax\u65e0\u6cd5\u8bc6\u522b\u8be5\u914d\u7f6e. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.18=\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u6587\u4ef6\u6709\u8bef. \u8def\u5f84[{0}]\u9700\u8981\u914d\u7f6eJson\u683c\u5f0f\u7684Map\u5bf9\u8c61\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{1}]. \u8bf7\u68c0\u67e5\u60a8\u7684\u914d\u7f6e\u5e76\u4f5c\u51fa\u4fee\u6539.
+configuration.19=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef\uff0c\u5217\u8868\u4e0b\u6807\u5fc5\u987b\u4e3a\u6570\u5b57\u7c7b\u578b\uff0c\u4f46\u8be5\u8282\u70b9\u53d1\u73b0\u5b9e\u9645\u7c7b\u578b\u662f[{0}] \uff0c\u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f !
+configuration.20=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8be5\u5f02\u5e38\u4ee3\u8868\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8bf7\u8054\u7cfbDataX\u5f00\u53d1\u56e2\u961f!.
+configuration.21=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u8def\u5f84[{0}]\u4e0d\u5408\u6cd5, \u8def\u5f84\u5c42\u6b21\u4e4b\u95f4\u4e0d\u80fd\u51fa\u73b0\u7a7a\u767d\u5b57\u7b26 .
+configuration.22=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u56e0\u4e3a\u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u4e0d\u662f\u5408\u6cd5\u7684JSON\u683c\u5f0f, JSON\u4e0d\u80fd\u4e3a\u7a7a\u767d. \u8bf7\u6309\u7167\u6807\u51c6json\u683c\u5f0f\u63d0\u4f9b\u914d\u7f6e\u4fe1\u606f.
+configuration.23=\u914d\u7f6e\u4fe1\u606f\u9519\u8bef. \u60a8\u63d0\u4f9b\u7684\u914d\u7f6e\u4fe1\u606f\u4e0d\u662f\u5408\u6cd5\u7684JSON\u683c\u5f0f: {0} . \u8bf7\u6309\u7167\u6807\u51c6json\u683c\u5f0f\u63d0\u4f9b\u914d\u7f6e\u4fe1\u606f. 
+
+
+listutil.1=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef\uff0cList\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.2=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.3=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u4fe1\u606f\u6709\u8bef, String:[{0}] \u4e0d\u5141\u8bb8\u91cd\u590d\u51fa\u73b0\u5728\u5217\u8868\u4e2d: [{1}].
+listutil.4=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.5=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.6=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u4fe1\u606f\u6709\u8bef, String:[{0}] \u4e0d\u5b58\u5728\u4e8e\u5217\u8868\u4e2d:[{1}].
+listutil.7=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+listutil.8=\u60a8\u63d0\u4f9b\u7684\u4f5c\u4e1a\u914d\u7f6e\u6709\u8bef, List\u4e0d\u80fd\u4e3a\u7a7a.
+
+
+rangesplitutil.1=\u5207\u5206\u4efd\u6570\u4e0d\u80fd\u5c0f\u4e8e1. \u6b64\u5904:expectSliceNumber=[{0}].
+rangesplitutil.2=\u5bf9 BigInteger \u8fdb\u884c\u5207\u5206\u65f6\uff0c\u5176\u5de6\u53f3\u533a\u95f4\u4e0d\u80fd\u4e3a null. \u6b64\u5904:left=[{0}],right=[{1}].
+rangesplitutil.3=\u53c2\u6570 bigInteger \u4e0d\u80fd\u4e3a\u7a7a.
+rangesplitutil.4=\u6839\u636e\u5b57\u7b26\u4e32\u8fdb\u884c\u5207\u5206\u65f6\u4ec5\u652f\u6301 ASCII \u5b57\u7b26\u4e32\uff0c\u800c\u5b57\u7b26\u4e32:[{0}]\u975e ASCII \u5b57\u7b26\u4e32.
+rangesplitutil.5=\u53c2\u6570 bigInteger \u4e0d\u80fd\u4e3a\u7a7a.
+rangesplitutil.6=\u6839\u636e\u5b57\u7b26\u4e32\u8fdb\u884c\u5207\u5206\u65f6\u4ec5\u652f\u6301 ASCII \u5b57\u7b26\u4e32\uff0c\u800c\u5b57\u7b26\u4e32:[{0}]\u975e ASCII \u5b57\u7b26\u4e32.
+
+
+retryutil.1=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u5165\u53c2callable\u4e0d\u80fd\u4e3a\u7a7a !
+retryutil.2=\u7cfb\u7edf\u7f16\u7a0b\u9519\u8bef, \u5165\u53c2retrytime[%d]\u4e0d\u80fd\u5c0f\u4e8e1 !
+retryutil.3=Exception when calling callable, \u5f02\u5e38Msg:{0}
+retryutil.4=Exception when calling callable, \u5373\u5c06\u5c1d\u8bd5\u6267\u884c\u7b2c{0}\u6b21\u91cd\u8bd5,\u5171\u8ba1\u91cd\u8bd5{1}\u6b21.\u672c\u6b21\u91cd\u8bd5\u8ba1\u5212\u7b49\u5f85[{2}]ms,\u5b9e\u9645\u7b49\u5f85[{3}]ms, \u5f02\u5e38Msg:[{4}]
+
+very_like_yixiao=一{0}二{1}三
+
+
+configuration.1=配置資訊錯誤,您提供的配置檔案[{0}]不存在. 請檢查您的配置檔案.
+configuration.2=配置資訊錯誤. 您提供配置檔案[{0}]讀取失敗,錯誤原因: {1}. 請檢查您的配置檔案的權限設定.
+configuration.3=請檢查您的配置檔案. 您提供的配置檔案讀取失敗,錯誤原因: {0}. 請檢查您的配置檔案的權限設定.
+configuration.4=您提供配置檔案有誤,[{0}]是必填參數,不允許為空或者留白 .
+configuration.5=您提供配置檔案有誤,[{0}]是必填參數,不允許為空或者留白 .
+configuration.6=任務讀取配置檔案出錯. 因為配置檔案路徑[{0}] 值不合法,期望是字符類型: {1}. 請檢查您的配置並作出修改.
+configuration.7=您提供的配置資訊有誤,因為從[{0}]獲取的值[{1}]無法轉換為bool類型. 請檢查源表的配置並且做出相應的修改.
+configuration.8=任務讀取配置檔案出錯. 配置檔案路徑[{0}] 值不合法, 期望是整數類型: {1}. 請檢查您的配置並作出修改.
+configuration.9=任務讀取配置檔案出錯. 配置檔案路徑[{0}] 值不合法, 期望是整數類型: {1}. 請檢查您的配置並作出修改.
+configuration.10=任務讀取配置檔案出錯. 配置檔案路徑[{0}] 值不合法, 期望是浮點類型: {1}. 請檢查您的配置並作出修改.
+configuration.11=配置檔案對應Key[{0}]並不存在,該情況是代碼編程錯誤. 請聯絡DataX團隊的同學.
+configuration.12=值[{0}]無法適配您提供[{1}], 該異常代表系統編程錯誤, 請聯絡DataX開發團隊!
+configuration.13=Path不能為null,該異常代表系統編程錯誤, 請聯絡DataX開發團隊 !
+configuration.14=路徑[{0}]出現不合法值類型[{1}],該異常代表系統編程錯誤, 請聯絡DataX開發團隊! .
+configuration.15=該異常代表系統編程錯誤, 請聯絡DataX開發團隊 !
+configuration.16=您提供的配置檔案有誤. 路徑[{0}]需要配置Json格式的Map對象,但該節點發現實際類型是[{1}]. 請檢查您的配置並作出修改.
+configuration.17=您提供的配置檔案有誤. 路徑[{0}]值為null,datax無法識別該配置. 請檢查您的配置並作出修改.
+configuration.18=您提供的配置檔案有誤. 路徑[{0}]需要配置Json格式的Map對象,但該節點發現實際類型是[{1}]. 請檢查您的配置並作出修改.
+configuration.19=系統編程錯誤,清單下標必須為數字類型,但該節點發現實際類型是[{0}] ,該異常代表系統編程錯誤, 請聯絡DataX開發團隊 !
+configuration.20=系統編程錯誤, 該異常代表系統編程錯誤, 請聯絡DataX開發團隊!.
+configuration.21=系統編程錯誤, 路徑[{0}]不合法, 路徑層次之間不能出現空白字符 .
+configuration.22=配置資訊錯誤. 因為您提供的配置資訊不是合法的JSON格式, JSON不能為空白. 請按照標準json格式提供配置資訊.
+configuration.23=配置資訊錯誤. 您提供的配置資訊不是合法的JSON格式: {0}. 請按照標準json格式提供配置資訊. 
+
+
+listutil.1=您提供的作業配置有誤,List不能為空.
+listutil.2=您提供的作業配置有誤, List不能為空.
+listutil.3=您提供的作業配置資訊有誤, String:[{0}]不允許重複出現在清單中: [{1}].
+listutil.4=您提供的作業配置有誤, List不能為空.
+listutil.5=您提供的作業配置有誤, List不能為空.
+listutil.6=您提供的作業配置資訊有誤, String:[{0}]不存在於清單中:[{1}].
+listutil.7=您提供的作業配置有誤, List不能為空.
+listutil.8=您提供的作業配置有誤, List不能為空.
+
+
+rangesplitutil.1=切分份數不能小於1. 此處:expectSliceNumber=[{0}].
+rangesplitutil.2=對 BigInteger 進行切分時,其左右區間不能為 null. 此處:left=[{0}],right=[{1}].
+rangesplitutil.3=參數 bigInteger 不能為空.
+rangesplitutil.4=根據字符串進行切分時僅支援 ASCII 字符串,而字符串:[{0}]非 ASCII 字符串.
+rangesplitutil.5=參數 bigInteger 不能為空.
+rangesplitutil.6=根據字符串進行切分時僅支援 ASCII 字符串,而字符串:[{0}]非 ASCII 字符串.
+
+
+retryutil.1=系統編程錯誤, 入參callable不能為空 !
+retryutil.2=系統編程錯誤, 入參retrytime[%d]不能小於1 !
+retryutil.3=Exception when calling callable, 異常Msg:{0}
+retryutil.4=Exception when calling callable, 即將嘗試執行第{0}次重試,共計重試{1}次.本次重試計劃等待[{2}]ms,實際等待[{3}]ms, 異常Msg:[{4}]
+
+httpclientutil.1=\u8BF7\u6C42\u5730\u5740\uFF1A{0}, \u8BF7\u6C42\u65B9\u6CD5\uFF1A{1},STATUS CODE = {2}, Response Entity: {3}
+httpclientutil.2=\u8FDC\u7A0B\u63A5\u53E3\u8FD4\u56DE-1,\u5C06\u91CD\u8BD5

+ 203 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/MessageSource.java

@@ -0,0 +1,203 @@
+package cn.tr.plugin.dataX.common.util;
+
+import cn.hutool.core.util.StrUtil;
+import com.sun.org.apache.xml.internal.utils.LocaleUtility;
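+// note: LocaleUtility above is a JDK-internal class (com.sun.org.apache.xml.internal.*); on JDK 9+ it is not exported by default,
+// so this import may need --add-exports, or a replacement such as Locale.forLanguageTag (which expects "en-US" rather than "en_US").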
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.text.MessageFormat;
+import java.util.*;
+
+
+public class MessageSource {
+    private static final Logger LOG = LoggerFactory.getLogger(MessageSource.class);
+    private static Map<String, ResourceBundle> resourceBundleCache = new HashMap<String, ResourceBundle>();
+    public static Locale locale = null;
+    public static TimeZone timeZone = null;
+    private ResourceBundle resourceBundle = null;
+
+
+    private MessageSource(ResourceBundle resourceBundle) {
+        this.resourceBundle = resourceBundle;
+    }
+
+    /**
+     * @param baseName
+     *            demo: javax.servlet.http.LocalStrings
+     * 
+     * @throws MissingResourceException
+     *             - if no resource bundle for the specified base name can be
+     *             found
+     * */
+    public static MessageSource loadResourceBundle(String baseName) {
+        return loadResourceBundle(baseName, MessageSource.locale,
+                MessageSource.timeZone);
+    }
+
+    /**
+     * @param clazz
+     *            class from which the package name (used as the bundle base name) is derived
+     * */
+    public static <T> MessageSource loadResourceBundle(Class<T> clazz) {
+        return loadResourceBundle(clazz.getPackage().getName());
+    }
+
+    /**
+     * @param clazz
+     *            class from which the package name (used as the bundle base name) is derived
+     * */
+    public static <T> MessageSource loadResourceBundle(Class<T> clazz,
+                                                       Locale locale, TimeZone timeZone) {
+        return loadResourceBundle(clazz.getPackage().getName(), locale,
+                timeZone);
+    }
+
+    /**
+     * warn: 
+     *   ok: ResourceBundle.getBundle("xxx.LocalStrings", Locale.getDefault(), LoadUtil.getJarLoader(PluginType.WRITER, "odpswriter"))
+     *   error: ResourceBundle.getBundle("xxx.LocalStrings", Locale.getDefault(), LoadUtil.getJarLoader(PluginType.WRITER, "odpswriter"))
+     * @param baseName
+     *            demo: javax.servlet.http.LocalStrings
+     * 
+     * @throws MissingResourceException
+     *             - if no resource bundle for the specified base name can be
+     *             found
+     *             
+     * */
+    public static MessageSource loadResourceBundle(String baseName,
+                                                   Locale locale, TimeZone timeZone) {
+        ResourceBundle resourceBundle = null;
+        if (null == locale) {
+            locale=LocaleUtility.langToLocale("en_US");
+        }
+        if (null == timeZone) {
+            timeZone = TimeZone.getDefault();
+        }
+        String resourceBaseName = String.format("%s.LocalStrings", baseName);
+        LOG.debug(
+                "initEnvironment MessageSource.locale[{}], MessageSource.timeZone[{}]",
+                MessageSource.locale, MessageSource.timeZone);
+        LOG.debug(
+                "loadResourceBundle with locale[{}], timeZone[{}], baseName[{}]",
+                locale, timeZone, resourceBaseName);
+        // warn: should the maintenance of this cache also take the Locale into account? no?
+        if (!MessageSource.resourceBundleCache.containsKey(resourceBaseName)) {
+            ClassLoader clazzLoader = Thread.currentThread()
+                    .getContextClassLoader();
+            LOG.debug("loadResourceBundle classLoader:{}", clazzLoader);
+            resourceBundle = ResourceBundle.getBundle(resourceBaseName, locale,
+                    clazzLoader);
+            MessageSource.resourceBundleCache.put(resourceBaseName,
+                    resourceBundle);
+        } else {
+            resourceBundle = MessageSource.resourceBundleCache
+                    .get(resourceBaseName);
+        }
+
+        return new MessageSource(resourceBundle);
+    }
+    
+    public static <T> boolean unloadResourceBundle(Class<T> clazz) {
+        String baseName = clazz.getPackage().getName();
+        String resourceBaseName = String.format("%s.LocalStrings", baseName);
+        if (!MessageSource.resourceBundleCache.containsKey(resourceBaseName)) {
+            return false;
+        } else {
+            MessageSource.resourceBundleCache.remove(resourceBaseName);
+            return true;
+        }
+    }
+    
+    public static <T> MessageSource reloadResourceBundle(Class<T> clazz) {
+        MessageSource.unloadResourceBundle(clazz);
+        return MessageSource.loadResourceBundle(clazz);
+    }
+
+    public static void setEnvironment(Locale locale, TimeZone timeZone) {
+        // warn: set as defaults? @2018.03.21 the commenting-out here was removed, otherwise problems occur with i18n across multiple time zones
+        Locale.setDefault(locale);
+        TimeZone.setDefault(timeZone);
+        MessageSource.locale = locale;
+        MessageSource.timeZone = timeZone;
+        LOG.info("use Locale: {} timeZone: {}", locale, timeZone);
+    }
+
+    public static void init(final Configuration configuration) {
+        Locale locale2Set = Locale.getDefault();
+        String localeStr = configuration.getString("common.column.locale", "zh_CN");// defaults to the operating system's
+        if (StrUtil.isNotBlank(localeStr)) {
+            try {
+                locale2Set = LocaleUtility.langToLocale(localeStr);
+            } catch (Exception e) {
+                LOG.warn("ignored locale parse exception: {}", e.getMessage());
+            }
+        }
+
+        TimeZone timeZone2Set = TimeZone.getDefault();
+        String timeZoneStr = configuration.getString("common.column.timeZone");// defaults to the operating system's
+        if (StrUtil.isNotBlank(timeZoneStr)) {
+            try {
+                timeZone2Set = TimeZone.getTimeZone(timeZoneStr);
+            } catch (Exception e) {
+                LOG.warn("ignored timezone parse exception: {}", e.getMessage());
+            }
+        }
+
+        LOG.info("JVM TimeZone: {}, Locale: {}", timeZone2Set.getID(), locale2Set);
+        MessageSource.setEnvironment(locale2Set, timeZone2Set);
+    }
+
+    public static void clearCache() {
+        MessageSource.resourceBundleCache.clear();
+    }
+
+    public String message(String code) {
+        return this.messageWithDefaultMessage(code, null);
+    }
+
+    public String message(String code, String args1) {
+        return this.messageWithDefaultMessage(code, null,
+                new Object[] { args1 });
+    }
+
+    public String message(String code, String args1, String args2) {
+        return this.messageWithDefaultMessage(code, null, new Object[] { args1,
+                args2 });
+    }
+
+    public String message(String code, String args1, String args2, String args3) {
+        return this.messageWithDefaultMessage(code, null, new Object[] { args1,
+                args2, args3 });
+    }
+
+    // the overloads above cover most cases; avoiding this varargs version can improve performance
+    public String message(String code, Object... args) {
+        return this.messageWithDefaultMessage(code, null, args);
+    }
+
+    public String messageWithDefaultMessage(String code, String defaultMessage) {
+        return this.messageWithDefaultMessage(code, defaultMessage,
+                new Object[] {});
+    }
+
+    /**
+     * @param args
+     *            MessageFormat calls toString on each argument in turn
+     * */
+    public String messageWithDefaultMessage(String code, String defaultMessage,
+                                            Object... args) {
+        String messageStr = null;
+        try {
+            messageStr = this.resourceBundle.getString(code);
+        } catch (MissingResourceException e) {
+            messageStr = defaultMessage;
+        }
+        if (null != messageStr && null != args && args.length > 0) {
+            // warn: see loadResourceBundle set default locale
+            return MessageFormat.format(messageStr, args);
+        } else {
+            return messageStr;
+        }
+
+    }
+}
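
For orientation, a minimal usage sketch of the MessageSource above (not part of the commit; MessageSourceDemo and its package are hypothetical, and it assumes one of the LocalStrings bundles shown earlier sits in that package on the classpath):

import cn.tr.plugin.dataX.common.util.MessageSource;

import java.util.Locale;
import java.util.TimeZone;

public class MessageSourceDemo {
    public static void main(String[] args) {
        // Pin locale/time zone explicitly instead of going through init(Configuration).
        MessageSource.setEnvironment(Locale.SIMPLIFIED_CHINESE, TimeZone.getTimeZone("Asia/Shanghai"));

        // Resolves <this package>.LocalStrings for zh_CN via the context class loader.
        MessageSource messages = MessageSource.loadResourceBundle(MessageSourceDemo.class);

        // listutil.3 has two placeholders: the duplicated string and the list it appears in.
        System.out.println(messages.message("listutil.3", "id", "[id, name, id]"));

        // Falls back to the supplied default when the key is missing.
        System.out.println(messages.messageWithDefaultMessage("no.such.key", "missing key"));
    }
}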

+ 230 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/RangeSplitUtil.java

@@ -0,0 +1,230 @@
+package cn.tr.plugin.dataX.common.util;
+
+import cn.hutool.core.lang.Pair;
+
+import java.math.BigInteger;
+import java.util.*;
+
+/**
+ * General-purpose helpers for splitting numeric ranges, string ranges, etc. into slices.
+ */
+public final class RangeSplitUtil {
+
+    public static String[] doAsciiStringSplit(String left, String right, int expectSliceNumber) {
+        int radix = 128;
+
+        BigInteger[] tempResult = doBigIntegerSplit(stringToBigInteger(left, radix),
+                stringToBigInteger(right, radix), expectSliceNumber);
+        String[] result = new String[tempResult.length];
+
+        // keep the original endpoints (when converting to a number and back, if the leading character happens to be the basic value we cannot tell how many of them to restore)
+        result[0] = left;
+        result[tempResult.length - 1] = right;
+
+        for (int i = 1, len = tempResult.length - 1; i < len; i++) {
+            result[i] = bigIntegerToString(tempResult[i], radix);
+        }
+
+        return result;
+    }
+
+
+    public static long[] doLongSplit(long left, long right, int expectSliceNumber) {
+        BigInteger[] result = doBigIntegerSplit(BigInteger.valueOf(left),
+                BigInteger.valueOf(right), expectSliceNumber);
+        long[] returnResult = new long[result.length];
+        for (int i = 0, len = result.length; i < len; i++) {
+            returnResult[i] = result[i].longValue();
+        }
+        return returnResult;
+    }
+
+    public static BigInteger[] doBigIntegerSplit(BigInteger left, BigInteger right, int expectSliceNumber) {
+        if (expectSliceNumber < 1) {
+            throw new IllegalArgumentException(String.format(
+                    "切分份数不能小于1. 此处:expectSliceNumber=[%s].", expectSliceNumber));
+        }
+
+        if (null == left || null == right) {
+            throw new IllegalArgumentException(String.format(
+                    "对 BigInteger 进行切分时,其左右区间不能为 null. 此处:left=[%s],right=[%s].", left, right));
+        }
+
+        if (left.compareTo(right) == 0) {
+            return new BigInteger[]{left, right};
+        } else {
+            // swap if necessary to ensure left < right
+            if (left.compareTo(right) > 0) {
+                BigInteger temp = left;
+                left = right;
+                right = temp;
+            }
+
+            //left < right
+            BigInteger endAndStartGap = right.subtract(left);
+
+            BigInteger step = endAndStartGap.divide(BigInteger.valueOf(expectSliceNumber));
+            BigInteger remainder = endAndStartGap.remainder(BigInteger.valueOf(expectSliceNumber));
+
+            // remainder can never exceed expectSliceNumber, so its int range needs no check
+
+            // must not test step.intValue()==0 here, because intValue() may overflow
+            if (step.compareTo(BigInteger.ZERO) == 0) {
+                expectSliceNumber = remainder.intValue();
+            }
+
+            BigInteger[] result = new BigInteger[expectSliceNumber + 1];
+            result[0] = left;
+            result[expectSliceNumber] = right;
+
+            BigInteger lowerBound;
+            BigInteger upperBound = left;
+            for (int i = 1; i < expectSliceNumber; i++) {
+                lowerBound = upperBound;
+                upperBound = lowerBound.add(step);
+                upperBound = upperBound.add((remainder.compareTo(BigInteger.valueOf(i)) >= 0)
+                        ? BigInteger.ONE : BigInteger.ZERO);
+                result[i] = upperBound;
+            }
+
+            return result;
+        }
+    }
+
+    private static void checkIfBetweenRange(int value, int left, int right) {
+        if (value < left || value > right) {
+            throw new IllegalArgumentException(String.format("parameter can not <[%s] or >[%s].",
+                    left, right));
+        }
+    }
+
+    /**
+     * Only ASCII characters are supported, so radix must be within [1,128].
+     */
+    public static BigInteger stringToBigInteger(String aString, int radix) {
+        if (null == aString) {
+            throw new IllegalArgumentException("参数 bigInteger 不能为空.");
+        }
+
+        checkIfBetweenRange(radix, 1, 128);
+
+        BigInteger result = BigInteger.ZERO;
+        BigInteger radixBigInteger = BigInteger.valueOf(radix);
+
+        int tempChar;
+        int k = 0;
+
+        for (int i = aString.length() - 1; i >= 0; i--) {
+            tempChar = aString.charAt(i);
+            if (tempChar >= 128) {
+                throw new IllegalArgumentException(String.format("根据字符串进行切分时仅支持 ASCII 字符串,而字符串:[%s]非 ASCII 字符串.", aString));
+            }
+            result = result.add(BigInteger.valueOf(tempChar).multiply(radixBigInteger.pow(k)));
+            k++;
+        }
+
+        return result;
+    }
+
+    /**
+     * Converts a BigInteger back to a String. Note: radix and basic must both be within [1,128], and radix + basic must also stay within [1,128].
+     */
+    private static String bigIntegerToString(BigInteger bigInteger, int radix) {
+        if (null == bigInteger) {
+            throw new IllegalArgumentException("参数 bigInteger 不能为空.");
+        }
+
+        checkIfBetweenRange(radix, 1, 128);
+
+        StringBuilder resultStringBuilder = new StringBuilder();
+
+        List<Integer> list = new ArrayList<Integer>();
+        BigInteger radixBigInteger = BigInteger.valueOf(radix);
+        BigInteger currentValue = bigInteger;
+
+        BigInteger quotient = currentValue.divide(radixBigInteger);
+        while (quotient.compareTo(BigInteger.ZERO) > 0) {
+            list.add(currentValue.remainder(radixBigInteger).intValue());
+            currentValue = currentValue.divide(radixBigInteger);
+            quotient = currentValue;
+        }
+        Collections.reverse(list);
+
+        if (list.isEmpty()) {
+            list.add(0, bigInteger.remainder(radixBigInteger).intValue());
+        }
+
+        Map<Integer, Character> map = new HashMap<Integer, Character>();
+        for (int i = 0; i < radix; i++) {
+            map.put(i, (char) (i));
+        }
+
+//        String msg = String.format("%s converted to base %s, result: %s", bigInteger.longValue(), radix, list);
+//        System.out.println(msg);
+
+        for (Integer aList : list) {
+            resultStringBuilder.append(map.get(aList));
+        }
+
+        return resultStringBuilder.toString();
+    }
+
+    /**
+     * Returns the smallest and largest character in the string (judged by ASCII value). The string must be non-empty and pure ASCII.
+     * In the returned Pair, left = smallest character and right = largest character.
+     */
+    public static Pair<Character, Character> getMinAndMaxCharacter(String aString) {
+        if (!isPureAscii(aString)) {
+            throw new IllegalArgumentException(String.format("根据字符串进行切分时仅支持 ASCII 字符串,而字符串:[%s]非 ASCII 字符串.", aString));
+        }
+
+        char min = aString.charAt(0);
+        char max = min;
+
+        char temp;
+        for (int i = 1, len = aString.length(); i < len; i++) {
+            temp = aString.charAt(i);
+            min = min < temp ? min : temp;
+            max = max > temp ? max : temp;
+        }
+        return Pair.of(min, max);
+    }
+
+    private static boolean isPureAscii(String aString) {
+        if (null == aString) {
+            return false;
+        }
+
+        for (int i = 0, len = aString.length(); i < len; i++) {
+            char ch = aString.charAt(i);
+            if (ch >= 127 || ch < 0) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+
+    /**
+     * Splits a List into sub-lists; mainly used by the split logic of reader plugins.
+     * */
+    public static <T> List<List<T>> doListSplit(List<T> objects, int adviceNumber) {
+        List<List<T>> splitLists = new ArrayList<List<T>>();
+        if (null == objects) {
+            return splitLists;
+        }
+        long[] splitPoint = RangeSplitUtil.doLongSplit(0, objects.size(), adviceNumber);
+        for (int startIndex = 0; startIndex < splitPoint.length - 1; startIndex++) {
+            List<T> objectsForTask = new ArrayList<T>();
+            int endIndex = startIndex + 1;
+            for (long i = splitPoint[startIndex]; i < splitPoint[endIndex]; i++) {
+                objectsForTask.add(objects.get((int) i));
+            }
+            if (!objectsForTask.isEmpty()) {
+                splitLists.add(objectsForTask);
+            }
+        }
+        return splitLists;
+    }
+
+}
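
A quick sketch of how these split helpers behave (RangeSplitDemo is a hypothetical driver, not part of the commit; expected outputs are noted in the comments):

import cn.tr.plugin.dataX.common.util.RangeSplitUtil;

import java.util.Arrays;
import java.util.List;

public class RangeSplitDemo {
    public static void main(String[] args) {
        // 0..100 into 4 slices -> boundary points [0, 25, 50, 75, 100]
        long[] points = RangeSplitUtil.doLongSplit(0L, 100L, 4);
        System.out.println(Arrays.toString(points));

        // ASCII range split; the endpoints are always returned unchanged -> [a, j, r, z]
        String[] stringPoints = RangeSplitUtil.doAsciiStringSplit("a", "z", 3);
        System.out.println(Arrays.toString(stringPoints));

        // 5 elements, advice 2 -> [[1, 2, 3], [4, 5]]; the remainder goes to the earlier slices
        List<List<Integer>> parts = RangeSplitUtil.doListSplit(Arrays.asList(1, 2, 3, 4, 5), 2);
        System.out.println(parts);
    }
}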

+ 208 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/RetryUtil.java

@@ -0,0 +1,208 @@
+package cn.tr.plugin.dataX.common.util;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.concurrent.*;
+
+public final class RetryUtil {
+
+    private static final Logger LOG = LoggerFactory.getLogger(RetryUtil.class);
+
+    private static final long MAX_SLEEP_MILLISECOND = 256 * 1000;
+
+    /**
+     * Executes a callable with retries.
+     *
+     * @param callable               the actual logic to run
+     * @param retryTimes             maximum number of attempts (>1)
+     * @param sleepTimeInMilliSecond time to sleep after a failure before retrying
+     * @param exponential            whether the sleep time grows exponentially
+     * @param <T>                    return type
+     * @return the result of the callable after retries
+     */
+    public static <T> T executeWithRetry(Callable<T> callable,
+                                         int retryTimes,
+                                         long sleepTimeInMilliSecond,
+                                         boolean exponential) throws Exception {
+        Retry retry = new Retry();
+        return retry.doRetry(callable, retryTimes, sleepTimeInMilliSecond, exponential, null);
+    }
+    
+    /**
+     * Executes a callable with retries.
+     *
+     * @param callable               the actual logic to run
+     * @param retryTimes             maximum number of attempts (>1)
+     * @param sleepTimeInMilliSecond time to sleep after a failure before retrying
+     * @param exponential            whether the sleep time grows exponentially
+     * @param <T>                    return type
+     * @param retryExceptionClasss   retry only when one of these exception types is thrown
+     * @return the result of the callable after retries
+     */
+    public static <T> T executeWithRetry(Callable<T> callable,
+                                         int retryTimes,
+                                         long sleepTimeInMilliSecond,
+                                         boolean exponential,
+                                         List<Class<?>> retryExceptionClasss) throws Exception {
+        Retry retry = new Retry();
+        return retry.doRetry(callable, retryTimes, sleepTimeInMilliSecond, exponential, retryExceptionClasss);
+    }
+
+    /**
+     * Executes the callable on a separate thread and retries. Each attempt must finish within timeoutMs, otherwise it counts as a failure.
+     * The thread pool for the asynchronous execution is passed in from outside, so the caller controls how it is shared; for example, HttpClientUtil shares one pool.
+     * <p/>
+     * Limitation: the thread can only be interrupted while it is blocked.
+     *
+     * @param callable               the actual logic to run
+     * @param retryTimes             maximum number of attempts (>1)
+     * @param sleepTimeInMilliSecond time to sleep after a failure before retrying
+     * @param exponential            whether the sleep time grows exponentially
+     * @param timeoutMs              timeout for each callable execution, in milliseconds
+     * @param executor               thread pool used for the asynchronous execution
+     * @param <T>                    return type
+     * @return the result of the callable after retries
+     */
+    public static <T> T asyncExecuteWithRetry(Callable<T> callable,
+                                              int retryTimes,
+                                              long sleepTimeInMilliSecond,
+                                              boolean exponential,
+                                              long timeoutMs,
+                                              ThreadPoolExecutor executor) throws Exception {
+        Retry retry = new AsyncRetry(timeoutMs, executor);
+        return retry.doRetry(callable, retryTimes, sleepTimeInMilliSecond, exponential, null);
+    }
+
+    /**
+     * Creates the thread pool for asynchronous execution. Characteristics:
+     * core size 0: no threads initially, so no idle cost.
+     * max size 5: at most five threads.
+     * 60-second keep-alive: threads idle for more than 60 seconds are reclaimed.
+     * Uses a SynchronousQueue: tasks are never queued; a submit succeeds only if a thread is available, otherwise a RejectedExecutionException is thrown.
+     *
+     * @return the thread pool
+     */
+    public static ThreadPoolExecutor createThreadPoolExecutor() {
+        return new ThreadPoolExecutor(0, 5,
+                60L, TimeUnit.SECONDS,
+                new SynchronousQueue<Runnable>());
+    }
+
+
+    private static class Retry {
+
+        public <T> T doRetry(Callable<T> callable, int retryTimes, long sleepTimeInMilliSecond, boolean exponential, List<Class<?>> retryExceptionClasss)
+                throws Exception {
+
+            if (null == callable) {
+                throw new IllegalArgumentException("系统编程错误, 入参callable不能为空 ! ");
+            }
+
+            if (retryTimes < 1) {
+                throw new IllegalArgumentException(String.format(
+                        "系统编程错误, 入参retrytime[%d]不能小于1 !", retryTimes));
+            }
+
+            Exception saveException = null;
+            for (int i = 0; i < retryTimes; i++) {
+                try {
+                    return call(callable);
+                } catch (Exception e) {
+                    saveException = e;
+                    if (i == 0) {
+                        LOG.error(String.format("Exception when calling callable, 异常Msg:%s", saveException.getMessage()), saveException);
+                    }
+                    
+                    if (null != retryExceptionClasss && !retryExceptionClasss.isEmpty()) {
+                        boolean needRetry = false;
+                        for (Class<?> eachExceptionClass : retryExceptionClasss) {
+                            if (eachExceptionClass == e.getClass()) {
+                                needRetry = true;
+                                break;
+                            }
+                        }
+                        if (!needRetry) {
+                            throw saveException;
+                        }
+                    }
+                    
+                    if (i + 1 < retryTimes && sleepTimeInMilliSecond > 0) {
+                        long startTime = System.currentTimeMillis();
+
+                        long timeToSleep;
+                        if (exponential) {
+                            timeToSleep = sleepTimeInMilliSecond * (long) Math.pow(2, i);
+                            if(timeToSleep >= MAX_SLEEP_MILLISECOND) {
+                                timeToSleep = MAX_SLEEP_MILLISECOND;
+                            }
+                        } else {
+                            timeToSleep = sleepTimeInMilliSecond;
+                            if(timeToSleep >= MAX_SLEEP_MILLISECOND) {
+                                timeToSleep = MAX_SLEEP_MILLISECOND;
+                            }
+                        }
+
+                        try {
+                            Thread.sleep(timeToSleep);
+                        } catch (InterruptedException ignored) {
+                        }
+
+                        long realTimeSleep = System.currentTimeMillis()-startTime;
+
+                        LOG.error(String.format("Exception when calling callable, 即将尝试执行第%s次重试.本次重试计划等待[%s]ms,实际等待[%s]ms, 异常Msg:[%s]",
+                                i+1, timeToSleep,realTimeSleep, e.getMessage()));
+
+                    }
+                }
+            }
+            throw saveException;
+        }
+
+        protected <T> T call(Callable<T> callable) throws Exception {
+            return callable.call();
+        }
+    }
+
+    private static class AsyncRetry extends Retry {
+
+        private long timeoutMs;
+        private ThreadPoolExecutor executor;
+
+        public AsyncRetry(long timeoutMs, ThreadPoolExecutor executor) {
+            this.timeoutMs = timeoutMs;
+            this.executor = executor;
+        }
+
+        /**
+         * Runs the task asynchronously on the supplied thread pool and waits for it.
+         * <p/>
+         * future.get() waits for the given number of milliseconds. If the task finishes within the timeout, the result is returned normally.
+         * If an exception is thrown (timeout, execution exception, or cancel/interrupt by another thread), it is logged and rethrown.
+         * In both the normal and the exceptional path the task is checked for completion; if it has not finished it is cancelled. The cancel
+         * parameter is true, so the thread is interrupted even if the task is still running.
+         *
+         * @param callable
+         * @param <T>
+         * @return
+         * @throws Exception
+         */
+        @Override
+        protected <T> T call(Callable<T> callable) throws Exception {
+            Future<T> future = executor.submit(callable);
+            try {
+                return future.get(timeoutMs, TimeUnit.MILLISECONDS);
+            } catch (Exception e) {
+                LOG.warn("Try once failed", e);
+                throw e;
+            } finally {
+                if (!future.isDone()) {
+                    future.cancel(true);
+                    LOG.warn("Try once task not done, cancel it, active count: " + executor.getActiveCount());
+                }
+            }
+        }
+    }
+
+}
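
A rough usage sketch for RetryUtil (RetryDemo is hypothetical; retry counts and timings are illustrative only):

import cn.tr.plugin.dataX.common.util.RetryUtil;

import java.util.concurrent.Callable;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicInteger;

public class RetryDemo {
    public static void main(String[] args) throws Exception {
        AtomicInteger attempts = new AtomicInteger();

        // Fails twice, succeeds on the third attempt; sleeps 100ms, then 200ms (exponential back-off).
        String result = RetryUtil.executeWithRetry(new Callable<String>() {
            @Override
            public String call() throws Exception {
                if (attempts.incrementAndGet() < 3) {
                    throw new IllegalStateException("not ready yet");
                }
                return "ok after " + attempts.get() + " attempts";
            }
        }, 3, 100L, true);
        System.out.println(result);

        // Same idea, but each attempt runs on the shared pool and must finish within 500ms.
        ThreadPoolExecutor pool = RetryUtil.createThreadPoolExecutor();
        Integer answer = RetryUtil.asyncExecuteWithRetry(
                new Callable<Integer>() {
                    @Override
                    public Integer call() {
                        return 42;
                    }
                }, 3, 100L, false, 500L, pool);
        System.out.println(answer);
        pool.shutdown();
    }
}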

+ 110 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/common/util/StrUtil.java

@@ -0,0 +1,110 @@
+package cn.tr.plugin.dataX.common.util;
+
+
+
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.text.DecimalFormat;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class StrUtil {
+
+    private final static long KB_IN_BYTES = 1024;
+
+    private final static long MB_IN_BYTES = 1024 * KB_IN_BYTES;
+
+    private final static long GB_IN_BYTES = 1024 * MB_IN_BYTES;
+
+    private final static long TB_IN_BYTES = 1024 * GB_IN_BYTES;
+
+    private final static DecimalFormat df = new DecimalFormat("0.00");
+
+    private static final Pattern VARIABLE_PATTERN = Pattern
+            .compile("(\\$)\\{?(\\w+)\\}?");
+
+    private static String SYSTEM_ENCODING = System.getProperty("file.encoding");
+
+    static {
+        if (SYSTEM_ENCODING == null) {
+            SYSTEM_ENCODING = "UTF-8";
+        }
+    }
+
+    private StrUtil() {
+    }
+
+    public static String stringify(long byteNumber) {
+        if (byteNumber / TB_IN_BYTES > 0) {
+            return df.format((double) byteNumber / (double) TB_IN_BYTES) + "TB";
+        } else if (byteNumber / GB_IN_BYTES > 0) {
+            return df.format((double) byteNumber / (double) GB_IN_BYTES) + "GB";
+        } else if (byteNumber / MB_IN_BYTES > 0) {
+            return df.format((double) byteNumber / (double) MB_IN_BYTES) + "MB";
+        } else if (byteNumber / KB_IN_BYTES > 0) {
+            return df.format((double) byteNumber / (double) KB_IN_BYTES) + "KB";
+        } else {
+            return String.valueOf(byteNumber) + "B";
+        }
+    }
+
+
+    public static String replaceVariable(final String param) {
+        Map<String, String> mapping = new HashMap<String, String>();
+
+        Matcher matcher = VARIABLE_PATTERN.matcher(param);
+        while (matcher.find()) {
+            String variable = matcher.group(2);
+            String value = System.getProperty(variable);
+            if (cn.hutool.core.util.StrUtil.isBlank(value)) {
+                value = matcher.group();
+            }
+            mapping.put(matcher.group(), value);
+        }
+
+        String retString = param;
+        for (final String key : mapping.keySet()) {
+            retString = retString.replace(key, mapping.get(key));
+        }
+
+        return retString;
+    }
+
+    public static String compressMiddle(String s, int headLength, int tailLength) {
+        if (s == null) {
+            throw new NullPointerException("Input string must not be null");
+        }
+        if (headLength <= 0) {
+            throw new IllegalArgumentException("Head length must be larger than 0");
+        }
+        if (tailLength <= 0) {
+            throw new IllegalArgumentException("Tail length must be larger than 0");
+        }
+        if(headLength + tailLength >= s.length()) {
+            return s;
+        }
+        return s.substring(0, headLength) + "..." + s.substring(s.length() - tailLength);
+    }
+
+    public static String getMd5(String plainText) {
+        try {
+            StringBuilder builder = new StringBuilder();
+            for (byte b : MessageDigest.getInstance("MD5").digest(plainText.getBytes())) {
+                int i = b & 0xff;
+                if (i < 0x10) {
+                    builder.append('0');
+                }
+                builder.append(Integer.toHexString(i));
+            }
+            return builder.toString();
+        } catch (NoSuchAlgorithmException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+}
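
And a short sketch of the helpers in this StrUtil (StrUtilDemo is hypothetical; expected outputs are in the comments):

import cn.tr.plugin.dataX.common.util.StrUtil;

public class StrUtilDemo {
    public static void main(String[] args) {
        System.out.println(StrUtil.stringify(1536));              // 1.50KB
        System.out.println(StrUtil.stringify(3L * 1024 * 1024));  // 3.00MB

        // The variable pattern only accepts word characters, e.g. ${REGION}; unresolved variables stay as-is.
        System.setProperty("REGION", "cn-hangzhou");
        System.out.println(StrUtil.replaceVariable("oss://bucket/${REGION}/part-${UNSET}"));
        // -> oss://bucket/cn-hangzhou/part-${UNSET}

        // Keeps the first 4 and last 4 characters, replaces the middle with "..." -> jdbc...test
        System.out.println(StrUtil.compressMiddle("jdbc:mysql://127.0.0.1:3306/test", 4, 4));

        System.out.println(StrUtil.getMd5("abc"));                // 900150983cd24fb0d6963f7d28e17f72
    }
}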

+ 35 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/AbstractContainer.java

@@ -0,0 +1,35 @@
+package cn.tr.plugin.dataX.core;
+
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.statistics.container.communicator.AbstractContainerCommunicator;
+import cn.tr.plugin.dataX.core.util.Validate;
+
+/**
+ * Abstract base class for execution containers; holds the container-wide configuration.
+ */
+public abstract class AbstractContainer {
+    protected Configuration configuration;
+
+    protected AbstractContainerCommunicator containerCommunicator;
+
+    public AbstractContainer(Configuration configuration) {
+        Validate.notNull(configuration, "Configuration can not be null.");
+
+        this.configuration = configuration;
+    }
+
+    public Configuration getConfiguration() {
+        return configuration;
+    }
+
+    public AbstractContainerCommunicator getContainerCommunicator() {
+        return containerCommunicator;
+    }
+
+    public void setContainerCommunicator(AbstractContainerCommunicator containerCommunicator) {
+        this.containerCommunicator = containerCommunicator;
+    }
+
+    public abstract void start();
+
+}

+ 227 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/Engine.java

@@ -0,0 +1,227 @@
+package cn.tr.plugin.dataX.core;
+
+import cn.hutool.core.util.StrUtil;
+import cn.tr.core.exception.BaseCode;
+import cn.tr.plugin.dataX.common.element.ColumnCast;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.tr.plugin.dataX.common.exception.ExceptionTracker;
+import cn.tr.plugin.dataX.common.statistics.PerfTrace;
+import cn.tr.plugin.dataX.common.statistics.VMInfo;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.common.util.MessageSource;
+import cn.tr.plugin.dataX.core.job.JobContainer;
+import cn.tr.plugin.dataX.core.taskgroup.TaskGroupContainer;
+import cn.tr.plugin.dataX.core.util.ConfigParser;
+import cn.tr.plugin.dataX.core.util.ConfigurationValidate;
+import cn.tr.plugin.dataX.core.util.FrameworkErrorCode;
+import cn.tr.plugin.dataX.core.util.container.CoreConstant;
+import cn.tr.plugin.dataX.core.util.container.LoadUtil;
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Options;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Engine is the DataX entry class. It initializes the Job or Task execution container and runs the plugin's Job or Task logic.
+ */
+public class Engine {
+    private static final Logger LOG = LoggerFactory.getLogger(Engine.class);
+
+    private static String RUNTIME_MODE;
+
+    /* check job model (job/task) first */
+    public void start(Configuration allConf) {
+
+        // Bind the column conversion (cast) settings
+        ColumnCast.bind(allConf);
+
+        /**
+         * Initialize the PluginLoader so that the various plugin configurations can be obtained
+         */
+        LoadUtil.bind(allConf);
+
+        boolean isJob = !("taskGroup".equalsIgnoreCase(allConf
+                .getString(CoreConstant.DATAX_CORE_CONTAINER_MODEL)));
+        // JobContainer sets and adjusts this value again after schedule()
+        int channelNumber = 0;
+        AbstractContainer container;
+        long instanceId;
+        int taskGroupId = -1;
+        if (isJob) {
+            allConf.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_MODE, RUNTIME_MODE);
+            container = new JobContainer(allConf);
+            instanceId = allConf.getLong(
+                    CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, 0);
+
+        } else {
+            container = new TaskGroupContainer(allConf);
+            instanceId = allConf.getLong(
+                    CoreConstant.DATAX_CORE_CONTAINER_JOB_ID);
+            taskGroupId = allConf.getInt(
+                    CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID);
+            channelNumber = allConf.getInt(
+                    CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL);
+        }
+
+        // perfTrace is enabled by default
+        boolean traceEnable = allConf.getBool(CoreConstant.DATAX_CORE_CONTAINER_TRACE_ENABLE, true);
+        boolean perfReportEnable = allConf.getBool(CoreConstant.DATAX_CORE_REPORT_DATAX_PERFLOG, true);
+
+        // datax shell jobs in standalone mode do not report
+        if(instanceId == -1){
+            perfReportEnable = false;
+        }
+
+        int priority = 0;
+        try {
+            priority = Integer.parseInt(System.getenv("SKYNET_PRIORITY"));
+        }catch (NumberFormatException e){
+            LOG.warn("prioriy set to 0, because NumberFormatException, the value is: "+ System.getProperty("PROIORY"));
+        }
+
+        Configuration jobInfoConfig = allConf.getConfiguration(CoreConstant.DATAX_JOB_JOBINFO);
+        // Initialize PerfTrace
+        PerfTrace perfTrace = PerfTrace.getInstance(isJob, instanceId, taskGroupId, priority, traceEnable);
+        perfTrace.setJobInfo(jobInfoConfig,perfReportEnable,channelNumber);
+        container.start();
+
+    }
+
+
+    // Note: sensitive information must be masked
+    public static String filterJobConfiguration(final Configuration configuration) {
+        Configuration jobConfWithSetting = configuration.getConfiguration("job").clone();
+
+        Configuration jobContent = jobConfWithSetting.getConfiguration("content");
+
+        filterSensitiveConfiguration(jobContent);
+
+        jobConfWithSetting.set("content",jobContent);
+
+        return jobConfWithSetting.beautify();
+    }
+
+    public static Configuration filterSensitiveConfiguration(Configuration configuration){
+        Set<String> keys = configuration.getKeys();
+        for (final String key : keys) {
+            boolean isSensitive = StrUtil.endWithAnyIgnoreCase(key, "password")
+                    || StrUtil.endWithAnyIgnoreCase(key, "accessKey");
+            if (isSensitive && configuration.get(key) instanceof String) {
+                configuration.set(key, configuration.getString(key).replaceAll(".", "*"));
+            }
+        }
+        return configuration;
+    }
+
+    public static void entry(final String[] args) throws Throwable {
+        Options options = new Options();
+        options.addOption("job", true, "Job config.");
+        options.addOption("jobid", true, "Job unique id.");
+        options.addOption("mode", true, "Job runtime mode.");
+
+        BasicParser parser = new BasicParser();
+        CommandLine cl = parser.parse(options, args);
+
+        String jobPath = cl.getOptionValue("job");
+
+        // If the user did not explicitly specify a jobid, datax.py passes a default jobid of -1
+        String jobIdString = cl.getOptionValue("jobid");
+        RUNTIME_MODE = cl.getOptionValue("mode");
+
+        Configuration configuration = ConfigParser.parse(jobPath);
+        // Bind i18n resources
+        MessageSource.init(configuration);
+        MessageSource.reloadResourceBundle(Configuration.class);
+
+        long jobId;
+        if (!"-1".equalsIgnoreCase(jobIdString)) {
+            jobId = Long.parseLong(jobIdString);
+        } else {
+            // only for dsc & ds & datax 3 update
+            String dscJobUrlPatternString = "/instance/(\\d{1,})/config.xml";
+            String dsJobUrlPatternString = "/inner/job/(\\d{1,})/config";
+            String dsTaskGroupUrlPatternString = "/inner/job/(\\d{1,})/taskGroup/";
+            List<String> patternStringList = Arrays.asList(dscJobUrlPatternString,
+                    dsJobUrlPatternString, dsTaskGroupUrlPatternString);
+            jobId = parseJobIdFromUrl(patternStringList, jobPath);
+        }
+
+        boolean isStandAloneMode = "standalone".equalsIgnoreCase(RUNTIME_MODE);
+        if (!isStandAloneMode && jobId == -1) {
+            // Outside standalone mode, jobId must never be -1
+            throw DataXException.asDataXException(FrameworkErrorCode.CONFIG_ERROR, "非 standalone 模式必须在 URL 中提供有效的 jobId.");
+        }
+        configuration.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, jobId);
+
+        // Print VM info
+        VMInfo vmInfo = VMInfo.getVmInfo();
+        if (vmInfo != null) {
+            LOG.info(vmInfo.toString());
+        }
+
+        LOG.info("\n" + Engine.filterJobConfiguration(configuration) + "\n");
+
+        LOG.debug(configuration.toJSON());
+
+        ConfigurationValidate.doValidate(configuration);
+        Engine engine = new Engine();
+        engine.start(configuration);
+    }
+
+
+    /**
+     * Returns -1 when no jobId can be parsed from the URL.
+     *
+     *  only for dsc & ds & datax 3 update
+     */
+    private static long parseJobIdFromUrl(List<String> patternStringList, String url) {
+        long result = -1;
+        for (String patternString : patternStringList) {
+            result = doParseJobIdFromUrl(patternString, url);
+            if (result != -1) {
+                return result;
+            }
+        }
+        return result;
+    }
+
+    private static long doParseJobIdFromUrl(String patternString, String url) {
+        Pattern pattern = Pattern.compile(patternString);
+        Matcher matcher = pattern.matcher(url);
+        if (matcher.find()) {
+            return Long.parseLong(matcher.group(1));
+        }
+
+        return -1;
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = 0;
+        try {
+            Engine.entry(args);
+        } catch (Throwable e) {
+            exitCode = 1;
+            LOG.error("\n\n经DataX智能分析,该任务最可能的错误原因是:\n" + ExceptionTracker.trace(e));
+
+            if (e instanceof DataXException) {
+                DataXException tempException = (DataXException) e;
+                BaseCode errorCode = tempException.getCode();
+                if (errorCode instanceof FrameworkErrorCode) {
+                    FrameworkErrorCode tempErrorCode = (FrameworkErrorCode) errorCode;
+                    exitCode = tempErrorCode.toExitValue();
+                }
+            }
+
+            System.exit(exitCode);
+        }
+        System.exit(exitCode);
+    }
+
+}
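
The sensitive-key masking in filterSensitiveConfiguration above can be pictured with a plain map standing in for DataX's Configuration (an assumption made only for this sketch); note that replaceAll(".", "*") is a regex call, so every character of the value becomes an asterisk.

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class MaskSensitiveSketch {
        // A flat Map stands in for Configuration; the real code iterates configuration.getKeys().
        static void maskSensitive(Map<String, Object> flatConf) {
            for (Map.Entry<String, Object> entry : flatConf.entrySet()) {
                String key = entry.getKey().toLowerCase();
                boolean sensitive = key.endsWith("password") || key.endsWith("accesskey");
                if (sensitive && entry.getValue() instanceof String) {
                    // replaceAll(".", "*") turns the whole value into asterisks of the same length.
                    entry.setValue(((String) entry.getValue()).replaceAll(".", "*"));
                }
            }
        }

        public static void main(String[] args) {
            Map<String, Object> conf = new LinkedHashMap<>();
            conf.put("job.content[0].writer.parameter.username", "root");
            conf.put("job.content[0].writer.parameter.password", "secret123");
            maskSensitive(conf);
            System.out.println(conf); // the password value prints as *********
        }
    }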

+ 5 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/LocalStrings.properties

@@ -0,0 +1,5 @@
+very_like_yixiao=\u4e00{0}\u4e8c{1}\u4e09
+
+engine.1=\u975e standalone \u6a21\u5f0f\u5fc5\u987b\u5728 URL \u4e2d\u63d0\u4f9b\u6709\u6548\u7684 jobId.    
+engine.2=\n\n\u7ecfDataX\u667a\u80fd\u5206\u6790,\u8be5\u4efb\u52a1\u6700\u53ef\u80fd\u7684\u9519\u8bef\u539f\u56e0\u662f:\n{0}
+

+ 5 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/LocalStrings_en_US.properties

@@ -0,0 +1,5 @@
+very_like_yixiao=1{0}2{1}3
+
+engine.1=A valid job ID must be provided in the URL for the non-standalone mode.    
+engine.2=\n\nThrough the intelligent analysis by DataX, the most likely error reason of this task is: \n{0}
+
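
These bundles are ordinary Java resource bundles whose values use java.text.MessageFormat placeholders ({0}); the MessageSource used in Engine above presumably resolves keys such as engine.1 and engine.2 against them per locale (an assumption, since MessageSource itself is not part of this diff). A minimal sketch of the underlying JDK mechanism:

    import java.text.MessageFormat;
    import java.util.Locale;
    import java.util.ResourceBundle;

    public class LocalStringsSketch {
        public static void main(String[] args) {
            // Assumes the LocalStrings*.properties files are on the classpath under cn/tr/plugin/dataX/core/.
            ResourceBundle bundle = ResourceBundle.getBundle("cn.tr.plugin.dataX.core.LocalStrings", Locale.US);
            String pattern = bundle.getString("engine.2");
            // {0} is filled with the stack trace text, just as Engine.main does with ExceptionTracker.trace(e).
            System.out.println(MessageFormat.format(pattern, "java.lang.RuntimeException: boom"));
        }
    }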

+ 5 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/LocalStrings_ja_JP.properties

@@ -0,0 +1,5 @@
+very_like_yixiao=1{0}2{1}3
+
+engine.1=\u975e standalone \u6a21\u5f0f\u5fc5\u987b\u5728 URL \u4e2d\u63d0\u4f9b\u6709\u6548\u7684 jobId.    
+engine.2=\n\n\u7ecfDataX\u667a\u80fd\u5206\u6790,\u8be5\u4efb\u52a1\u6700\u53ef\u80fd\u7684\u9519\u8bef\u539f\u56e0\u662f:\n{0}
+

+ 5 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/LocalStrings_zh_CN.properties

@@ -0,0 +1,5 @@
+very_like_yixiao=\u4e00{0}\u4e8c{1}\u4e09
+
+engine.1=\u975e standalone \u6a21\u5f0f\u5fc5\u987b\u5728 URL \u4e2d\u63d0\u4f9b\u6709\u6548\u7684 jobId.    
+engine.2=\n\n\u7ecfDataX\u667a\u80fd\u5206\u6790,\u8be5\u4efb\u52a1\u6700\u53ef\u80fd\u7684\u9519\u8bef\u539f\u56e0\u662f:\n{0}
+

+ 10 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/LocalStrings_zh_HK.properties

@@ -0,0 +1,10 @@
+very_like_yixiao=\u4e00{0}\u4e8c{1}\u4e09
+
+engine.1=\u975e standalone \u6a21\u5f0f\u5fc5\u987b\u5728 URL \u4e2d\u63d0\u4f9b\u6709\u6548\u7684 jobId.    
+engine.2=\n\n\u7ecfDataX\u667a\u80fd\u5206\u6790,\u8be5\u4efb\u52a1\u6700\u53ef\u80fd\u7684\u9519\u8bef\u539f\u56e0\u662f:\n{0}
+
+very_like_yixiao=一{0}二{1}三
+
+engine.1=非 standalone 模式必須在 URL 中提供有效的 jobId.    
+engine.2=\n\n經DataX智能分析,該任務最可能的錯誤原因是:\n{0}
+

+ 10 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/LocalStrings_zh_TW.properties

@@ -0,0 +1,10 @@
+very_like_yixiao=\u4e00{0}\u4e8c{1}\u4e09
+
+engine.1=\u975e standalone \u6a21\u5f0f\u5fc5\u987b\u5728 URL \u4e2d\u63d0\u4f9b\u6709\u6548\u7684 jobId.    
+engine.2=\n\n\u7ecfDataX\u667a\u80fd\u5206\u6790,\u8be5\u4efb\u52a1\u6700\u53ef\u80fd\u7684\u9519\u8bef\u539f\u56e0\u662f:\n{0}
+
+very_like_yixiao=一{0}二{1}三
+
+engine.1=非 standalone 模式必須在 URL 中提供有效的 jobId.    
+engine.2=\n\n經DataX智能分析,該任務最可能的錯誤原因是:\n{0}
+

+ 91 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/container/util/HookInvoker.java

@@ -0,0 +1,91 @@
+package cn.tr.plugin.dataX.core.container.util;
+
+/**
+ * Created by xiafei.qiuxf on 14/12/17.
+ */
+
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.tr.plugin.dataX.common.spi.Hook;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.util.FrameworkErrorCode;
+import cn.tr.plugin.dataX.core.util.container.JarLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.ServiceLoader;
+
+/**
+ * Scans all first-level subdirectories of the given base directory and treats each one as a Hook directory.
+ * Each subdirectory must follow the standard ServiceLoader layout, see http://docs.oracle.com/javase/6/docs/api/java/util/ServiceLoader.html.
+ * The jars inside are loaded and the hooks are invoked through the ServiceLoader mechanism.
+ */
+public class HookInvoker {
+
+    private static final Logger LOG = LoggerFactory.getLogger(HookInvoker.class);
+    private final Map<String, Number> msg;
+    private final Configuration conf;
+
+    private File baseDir;
+
+    public HookInvoker(String baseDirName, Configuration conf, Map<String, Number> msg) {
+        this.baseDir = new File(baseDirName);
+        this.conf = conf;
+        this.msg = msg;
+    }
+
+    public void invokeAll() {
+        if (!baseDir.exists() || baseDir.isFile()) {
+            LOG.info("No hook invoked, because base dir not exists or is a file: " + baseDir.getAbsolutePath());
+            return;
+        }
+
+        String[] subDirs = baseDir.list(new FilenameFilter() {
+            @Override
+            public boolean accept(File dir, String name) {
+                return new File(dir, name).isDirectory();
+            }
+        });
+
+        if (subDirs == null) {
+            throw DataXException.asDataXException(FrameworkErrorCode.HOOK_LOAD_ERROR, "获取HOOK子目录返回null");
+        }
+
+        for (String subDir : subDirs) {
+            doInvoke(new File(baseDir, subDir).getAbsolutePath());
+        }
+
+    }
+
+    private void doInvoke(String path) {
+        ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
+        try {
+            JarLoader jarLoader = new JarLoader(new String[]{path});
+            Thread.currentThread().setContextClassLoader(jarLoader);
+            Iterator<Hook> hookIt = ServiceLoader.load(Hook.class).iterator();
+            if (!hookIt.hasNext()) {
+                LOG.warn("No hook defined under path: " + path);
+            } else {
+                Hook hook = hookIt.next();
+                LOG.info("Invoke hook [{}], path: {}", hook.getName(), path);
+                hook.invoke(conf, msg);
+            }
+        } catch (Exception e) {
+            LOG.error("Exception when invoke hook", e);
+            throw DataXException.asDataXException(
+                    TRExcCode.HOOK_INTERNAL_ERROR, "Exception when invoke hook", e);
+        } finally {
+            Thread.currentThread().setContextClassLoader(oldClassLoader);
+        }
+    }
+
+    public static void main(String[] args) {
+        new HookInvoker("/Users/xiafei/workspace/datax3/target/datax/datax/hook",
+                null, new HashMap<String, Number>()).invokeAll();
+    }
+}
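
HookInvoker relies on the standard java.util.ServiceLoader discovery convention described in the class comment. The sketch below illustrates that convention with a stand-in interface; the real cn.tr.plugin.dataX.common.spi.Hook is assumed, from the calls above, to expose getName() and an invoke(...) that also takes the job Configuration and a metrics map.

    import java.util.Iterator;
    import java.util.ServiceLoader;

    public class HookSpiSketch {
        // Stand-in for cn.tr.plugin.dataX.common.spi.Hook, simplified for this sketch.
        public interface Hook {
            String getName();
            void invoke();
        }

        public static class LoggingHook implements Hook {
            @Override public String getName() { return "logging-hook"; }
            @Override public void invoke() { System.out.println("hook fired"); }
        }

        public static void main(String[] args) {
            // For ServiceLoader to discover LoggingHook, the hook jar must ship a provider file
            //   META-INF/services/HookSpiSketch$Hook
            // whose single line is the implementation class name:
            //   HookSpiSketch$LoggingHook
            // HookInvoker does the same per subdirectory, with a JarLoader as the context class loader.
            Iterator<Hook> it = ServiceLoader.load(Hook.class).iterator();
            if (!it.hasNext()) {
                System.out.println("No hook registered on the classpath");
            } else {
                it.next().invoke();
            }
        }
    }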

+ 178 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/container/util/JobAssignUtil.java

@@ -0,0 +1,178 @@
+package cn.tr.plugin.dataX.core.container.util;
+
+import cn.hutool.core.util.StrUtil;
+import cn.tr.plugin.dataX.common.constant.CommonConstant;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.util.Validate;
+import cn.tr.plugin.dataX.core.util.container.CoreConstant;
+
+import java.util.*;
+
+public final class JobAssignUtil {
+    private JobAssignUtil() {
+    }
+
+    /**
+     * Fairly assigns tasks to their taskGroups.
+     * "Fair" means the load-balance resource mark carried by each task is taken into account so that work is spread more evenly.
+     * TODO document this with a concrete example
+     */
+    public static List<Configuration> assignFairly(Configuration configuration, int channelNumber, int channelsPerTaskGroup) {
+
+        Validate.isTrue(configuration != null, "框架获得的 Job 不能为 null.");
+
+        List<Configuration> contentConfig = configuration.getListConfiguration(CoreConstant.DATAX_JOB_CONTENT);
+        Validate.isTrue(contentConfig.size() > 0, "框架获得的切分后的 Job 无内容.");
+
+        Validate.isTrue(channelNumber > 0 && channelsPerTaskGroup > 0,
+                "每个channel的平均task数[averTaskPerChannel],channel数目[channelNumber],每个taskGroup的平均channel数[channelsPerTaskGroup]都应该为正数");
+
+        int taskGroupNumber = (int) Math.ceil(1.0 * channelNumber / channelsPerTaskGroup);
+
+        Configuration aTaskConfig = contentConfig.get(0);
+
+        String readerResourceMark = aTaskConfig.getString(CoreConstant.JOB_READER_PARAMETER + "." +
+                CommonConstant.LOAD_BALANCE_RESOURCE_MARK);
+        String writerResourceMark = aTaskConfig.getString(CoreConstant.JOB_WRITER_PARAMETER + "." +
+                CommonConstant.LOAD_BALANCE_RESOURCE_MARK);
+
+        boolean hasLoadBalanceResourceMark = StrUtil.isNotBlank(readerResourceMark) ||
+                StrUtil.isNotBlank(writerResourceMark);
+
+        if (!hasLoadBalanceResourceMark) {
+            // Fake a fixed key as the resource mark (either reader or writer would do; the reader is used here)
+            for (Configuration conf : contentConfig) {
+                conf.set(CoreConstant.JOB_READER_PARAMETER + "." +
+                        CommonConstant.LOAD_BALANCE_RESOURCE_MARK, "aFakeResourceMarkForLoadBalance");
+            }
+            // Shuffle once so that tasks are not assigned in a biased order when plugins set no resource mark
+            Collections.shuffle(contentConfig, new Random(System.currentTimeMillis()));
+        }
+
+        LinkedHashMap<String, List<Integer>> resourceMarkAndTaskIdMap = parseAndGetResourceMarkAndTaskIdMap(contentConfig);
+        List<Configuration> taskGroupConfig = doAssign(resourceMarkAndTaskIdMap, configuration, taskGroupNumber);
+
+        // Adjust the channel count of each taskGroup (an optimization)
+        adjustChannelNumPerTaskGroup(taskGroupConfig, channelNumber);
+        return taskGroupConfig;
+    }
+
+    private static void adjustChannelNumPerTaskGroup(List<Configuration> taskGroupConfig, int channelNumber) {
+        int taskGroupNumber = taskGroupConfig.size();
+        int avgChannelsPerTaskGroup = channelNumber / taskGroupNumber;
+        int remainderChannelCount = channelNumber % taskGroupNumber;
+        // remainderChannelCount taskGroups get avgChannelsPerTaskGroup + 1 channels;
+        // the remaining (taskGroupNumber - remainderChannelCount) taskGroups get avgChannelsPerTaskGroup channels
+
+        int i = 0;
+        for (; i < remainderChannelCount; i++) {
+            taskGroupConfig.get(i).set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL, avgChannelsPerTaskGroup + 1);
+        }
+
+        for (int j = 0; j < taskGroupNumber - remainderChannelCount; j++) {
+            taskGroupConfig.get(i + j).set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL, avgChannelsPerTaskGroup);
+        }
+    }
+
+    /**
+     * Builds, from the task configurations, the mapping of
+     * resource mark --> taskId list
+     */
+    private static LinkedHashMap<String, List<Integer>> parseAndGetResourceMarkAndTaskIdMap(List<Configuration> contentConfig) {
+        // key: resourceMark, value: taskId
+        LinkedHashMap<String, List<Integer>> readerResourceMarkAndTaskIdMap = new LinkedHashMap<String, List<Integer>>();
+        LinkedHashMap<String, List<Integer>> writerResourceMarkAndTaskIdMap = new LinkedHashMap<String, List<Integer>>();
+
+        for (Configuration aTaskConfig : contentConfig) {
+            int taskId = aTaskConfig.getInt(CoreConstant.TASK_ID);
+            // Add readerResourceMark to readerResourceMarkAndTaskIdMap
+            String readerResourceMark = aTaskConfig.getString(CoreConstant.JOB_READER_PARAMETER + "." + CommonConstant.LOAD_BALANCE_RESOURCE_MARK);
+            if (readerResourceMarkAndTaskIdMap.get(readerResourceMark) == null) {
+                readerResourceMarkAndTaskIdMap.put(readerResourceMark, new LinkedList<Integer>());
+            }
+            readerResourceMarkAndTaskIdMap.get(readerResourceMark).add(taskId);
+
+            // Add writerResourceMark to writerResourceMarkAndTaskIdMap
+            String writerResourceMark = aTaskConfig.getString(CoreConstant.JOB_WRITER_PARAMETER + "." + CommonConstant.LOAD_BALANCE_RESOURCE_MARK);
+            if (writerResourceMarkAndTaskIdMap.get(writerResourceMark) == null) {
+                writerResourceMarkAndTaskIdMap.put(writerResourceMark, new LinkedList<Integer>());
+            }
+            writerResourceMarkAndTaskIdMap.get(writerResourceMark).add(taskId);
+        }
+
+        if (readerResourceMarkAndTaskIdMap.size() >= writerResourceMarkAndTaskIdMap.size()) {
+            // Use the reader's resource marks for the assignment
+            return readerResourceMarkAndTaskIdMap;
+        } else {
+            // Use the writer's resource marks for the assignment
+            return writerResourceMarkAndTaskIdMap;
+        }
+    }
+
+
+    /**
+     *
+     * The desired effect, by example:
+     * <pre>
+     * database a has tables: 0, 1, 2
+     * database b has tables: 3, 4
+     * database c has tables: 5, 6, 7
+     *
+     * with 4 taskGroups,
+     * the assignment result is:
+     * taskGroup-0: 0,  4,
+     * taskGroup-1: 3,  6,
+     * taskGroup-2: 5,  2,
+     * taskGroup-3: 1,  7
+     *
+     * </pre>
+     */
+    private static List<Configuration> doAssign(LinkedHashMap<String, List<Integer>> resourceMarkAndTaskIdMap, Configuration jobConfiguration, int taskGroupNumber) {
+        List<Configuration> contentConfig = jobConfiguration.getListConfiguration(CoreConstant.DATAX_JOB_CONTENT);
+
+        Configuration taskGroupTemplate = jobConfiguration.clone();
+        taskGroupTemplate.remove(CoreConstant.DATAX_JOB_CONTENT);
+
+        List<Configuration> result = new LinkedList<Configuration>();
+
+        List<List<Configuration>> taskGroupConfigList = new ArrayList<List<Configuration>>(taskGroupNumber);
+        for (int i = 0; i < taskGroupNumber; i++) {
+            taskGroupConfigList.add(new LinkedList<Configuration>());
+        }
+
+        int mapValueMaxLength = -1;
+
+        List<String> resourceMarks = new ArrayList<String>();
+        for (Map.Entry<String, List<Integer>> entry : resourceMarkAndTaskIdMap.entrySet()) {
+            resourceMarks.add(entry.getKey());
+            if (entry.getValue().size() > mapValueMaxLength) {
+                mapValueMaxLength = entry.getValue().size();
+            }
+        }
+
+        int taskGroupIndex = 0;
+        for (int i = 0; i < mapValueMaxLength; i++) {
+            for (String resourceMark : resourceMarks) {
+                if (resourceMarkAndTaskIdMap.get(resourceMark).size() > 0) {
+                    int taskId = resourceMarkAndTaskIdMap.get(resourceMark).get(0);
+                    taskGroupConfigList.get(taskGroupIndex % taskGroupNumber).add(contentConfig.get(taskId));
+                    taskGroupIndex++;
+
+                    resourceMarkAndTaskIdMap.get(resourceMark).remove(0);
+                }
+            }
+        }
+
+        Configuration tempTaskGroupConfig;
+        for (int i = 0; i < taskGroupNumber; i++) {
+            tempTaskGroupConfig = taskGroupTemplate.clone();
+            tempTaskGroupConfig.set(CoreConstant.DATAX_JOB_CONTENT, taskGroupConfigList.get(i));
+            tempTaskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID, i);
+
+            result.add(tempTaskGroupConfig);
+        }
+
+        return result;
+    }
+
+}
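
A self-contained sketch of the round-robin assignment that doAssign performs, reproducing the example from its Javadoc. Plain integer lists stand in for the per-task Configuration objects (illustration only).

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.LinkedList;
    import java.util.List;

    public class AssignSketch {
        public static void main(String[] args) {
            // resource mark -> taskIds, as in the doAssign() Javadoc example.
            LinkedHashMap<String, List<Integer>> marks = new LinkedHashMap<>();
            marks.put("a", new LinkedList<>(Arrays.asList(0, 1, 2)));
            marks.put("b", new LinkedList<>(Arrays.asList(3, 4)));
            marks.put("c", new LinkedList<>(Arrays.asList(5, 6, 7)));

            int taskGroupNumber = 4;
            List<List<Integer>> groups = new ArrayList<>();
            for (int i = 0; i < taskGroupNumber; i++) {
                groups.add(new ArrayList<>());
            }

            int maxLen = marks.values().stream().mapToInt(List::size).max().orElse(0);
            int groupIndex = 0;
            // Same round-robin as doAssign(): one "row" per pass, cycling over the resource marks.
            for (int i = 0; i < maxLen; i++) {
                for (List<Integer> taskIds : marks.values()) {
                    if (!taskIds.isEmpty()) {
                        groups.get(groupIndex % taskGroupNumber).add(taskIds.remove(0));
                        groupIndex++;
                    }
                }
            }
            // Prints [[0, 4], [3, 6], [5, 2], [1, 7]] -- matching the Javadoc example above.
            System.out.println(groups);
        }
    }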

+ 975 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/job/JobContainer.java

@@ -0,0 +1,975 @@
+package cn.tr.plugin.dataX.core.job;
+
+import cn.hutool.core.util.StrUtil;
+import cn.tr.core.utils.JsonUtils;
+import cn.tr.plugin.dataX.common.constant.PluginType;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.tr.plugin.dataX.common.plugin.AbstractJobPlugin;
+import cn.tr.plugin.dataX.common.plugin.JobPluginCollector;
+import cn.tr.plugin.dataX.common.spi.Reader;
+import cn.tr.plugin.dataX.common.spi.Writer;
+import cn.tr.plugin.dataX.common.statistics.PerfTrace;
+import cn.tr.plugin.dataX.common.statistics.VMInfo;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.AbstractContainer;
+import cn.tr.plugin.dataX.core.Engine;
+import cn.tr.plugin.dataX.core.container.util.HookInvoker;
+import cn.tr.plugin.dataX.core.container.util.JobAssignUtil;
+import cn.tr.plugin.dataX.core.job.scheduler.AbstractScheduler;
+import cn.tr.plugin.dataX.core.job.scheduler.processinner.StandAloneScheduler;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;
+import cn.tr.plugin.dataX.core.statistics.container.communicator.AbstractContainerCommunicator;
+import cn.tr.plugin.dataX.core.statistics.container.communicator.job.StandAloneJobContainerCommunicator;
+import cn.tr.plugin.dataX.core.statistics.plugin.DefaultJobPluginCollector;
+import cn.tr.plugin.dataX.core.util.ErrorRecordChecker;
+import cn.tr.plugin.dataX.core.util.FrameworkErrorCode;
+import cn.tr.plugin.dataX.core.util.Validate;
+import cn.tr.plugin.dataX.core.util.container.ClassLoaderSwapper;
+import cn.tr.plugin.dataX.core.util.container.CoreConstant;
+import cn.tr.plugin.dataX.core.util.container.LoadUtil;
+import cn.tr.plugin.dataX.dataxservice.face.domain.enums.ExecuteMode;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Created by jingxing on 14-8-24.
+ * <p/>
+ * A job instance runs inside the jobContainer. It is the master of all tasks and is responsible for initialization,
+ * splitting, scheduling, running, cleanup, monitoring and reporting, but it does not perform the actual data transfer itself.
+ */
+public class JobContainer extends AbstractContainer {
+    private static final Logger LOG = LoggerFactory
+            .getLogger(JobContainer.class);
+
+    private static final SimpleDateFormat dateFormat = new SimpleDateFormat(
+            "yyyy-MM-dd HH:mm:ss");
+
+    private ClassLoaderSwapper classLoaderSwapper = ClassLoaderSwapper
+            .newCurrentThreadClassLoaderSwapper();
+
+    private long jobId;
+
+    private String readerPluginName;
+
+    private String writerPluginName;
+
+    /**
+     * The job-level Reader.Job and Writer.Job instances held by this jobContainer
+     */
+    private Reader.Job jobReader;
+
+    private Writer.Job jobWriter;
+
+    private Configuration userConf;
+
+    private long startTimeStamp;
+
+    private long endTimeStamp;
+
+    private long startTransferTimeStamp;
+
+    private long endTransferTimeStamp;
+
+    private int needChannelNumber;
+
+    private int totalStage = 1;
+
+    private ErrorRecordChecker errorLimit;
+
+    public JobContainer(Configuration configuration) {
+        super(configuration);
+
+        errorLimit = new ErrorRecordChecker(configuration);
+    }
+
+    /**
+     * All of the jobContainer's main work happens in start(): init, prepare, split, schedule,
+     * post, as well as destroy and statistics
+     */
+    @Override
+    public void start() {
+        LOG.info("DataX jobContainer starts job.");
+
+        boolean hasException = false;
+        boolean isDryRun = false;
+        try {
+            this.startTimeStamp = System.currentTimeMillis();
+            isDryRun = configuration.getBool(CoreConstant.DATAX_JOB_SETTING_DRYRUN, false);
+            if(isDryRun) {
+                LOG.info("jobContainer starts to do preCheck ...");
+                this.preCheck();
+            } else {
+                userConf = configuration.clone();
+                LOG.debug("jobContainer starts to do preHandle ...");
+                this.preHandle();
+
+                LOG.debug("jobContainer starts to do init ...");
+                this.init();
+                LOG.info("jobContainer starts to do prepare ...");
+                this.prepare();
+                LOG.info("jobContainer starts to do split ...");
+                this.totalStage = this.split();
+                LOG.info("jobContainer starts to do schedule ...");
+                this.schedule();
+                LOG.debug("jobContainer starts to do post ...");
+                this.post();
+
+                LOG.debug("jobContainer starts to do postHandle ...");
+                this.postHandle();
+                LOG.info("DataX jobId [{}] completed successfully.", this.jobId);
+
+                this.invokeHooks();
+            }
+        } catch (Throwable e) {
+            LOG.error("Exception when job run", e);
+
+            hasException = true;
+
+            if (e instanceof OutOfMemoryError) {
+                this.destroy();
+                System.gc();
+            }
+
+
+            if (super.getContainerCommunicator() == null) {
+                // containerCollector is initialized in schedule(), so if an exception occurs before schedule() it has to be initialized here
+
+                AbstractContainerCommunicator tempContainerCollector;
+                // standalone
+                tempContainerCollector = new StandAloneJobContainerCommunicator(configuration);
+
+                super.setContainerCommunicator(tempContainerCollector);
+            }
+
+            Communication communication = super.getContainerCommunicator().collect();
+            // The state before reporting does not need to be set manually
+            // communication.setState(State.FAILED);
+            communication.setThrowable(e);
+            communication.setTimestamp(this.endTimeStamp);
+
+            Communication tempComm = new Communication();
+            tempComm.setTimestamp(this.startTransferTimeStamp);
+
+            Communication reportCommunication = CommunicationTool.getReportCommunication(communication, tempComm, this.totalStage);
+            super.getContainerCommunicator().report(reportCommunication);
+
+            throw DataXException.asDataXException(
+                    FrameworkErrorCode.RUNTIME_ERROR, e);
+        } finally {
+            if(!isDryRun) {
+
+                this.destroy();
+                this.endTimeStamp = System.currentTimeMillis();
+                if (!hasException) {
+                    // Finally print the average CPU usage and the GC statistics
+                    VMInfo vmInfo = VMInfo.getVmInfo();
+                    if (vmInfo != null) {
+                        vmInfo.getDelta(false);
+                        LOG.info(vmInfo.totalString());
+                    }
+
+                    LOG.info(PerfTrace.getInstance().summarizeNoException());
+                    this.logStatistics();
+                }
+            }
+        }
+    }
+
+    private void preCheck() {
+        this.preCheckInit();
+        this.adjustChannelNumber();
+
+        if (this.needChannelNumber <= 0) {
+            this.needChannelNumber = 1;
+        }
+        this.preCheckReader();
+        this.preCheckWriter();
+        LOG.info("PreCheck通过");
+    }
+
+    private void preCheckInit() {
+        this.jobId = this.configuration.getLong(
+                CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, -1);
+
+        if (this.jobId < 0) {
+            LOG.info("Set jobId = 0");
+            this.jobId = 0;
+            this.configuration.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID,
+                    this.jobId);
+        }
+
+        Thread.currentThread().setName("job-" + this.jobId);
+
+        JobPluginCollector jobPluginCollector = new DefaultJobPluginCollector(
+                this.getContainerCommunicator());
+        this.jobReader = this.preCheckReaderInit(jobPluginCollector);
+        this.jobWriter = this.preCheckWriterInit(jobPluginCollector);
+    }
+
+    private Reader.Job preCheckReaderInit(JobPluginCollector jobPluginCollector) {
+        this.readerPluginName = this.configuration.getString(
+                CoreConstant.DATAX_JOB_CONTENT_READER_NAME);
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                PluginType.READER, this.readerPluginName));
+
+        Reader.Job jobReader = (Reader.Job) LoadUtil.loadJobPlugin(
+                PluginType.READER, this.readerPluginName);
+
+        this.configuration.set(CoreConstant.DATAX_JOB_CONTENT_READER_PARAMETER + ".dryRun", true);
+
+        // Set the reader's jobConfig
+        jobReader.setPluginJobConf(this.configuration.getConfiguration(
+                CoreConstant.DATAX_JOB_CONTENT_READER_PARAMETER));
+        // Set the reader's peer plugin config
+        jobReader.setPeerPluginJobConf(this.configuration.getConfiguration(
+                CoreConstant.DATAX_JOB_CONTENT_READER_PARAMETER));
+
+        jobReader.setJobPluginCollector(jobPluginCollector);
+
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+        return jobReader;
+    }
+
+
+    private Writer.Job preCheckWriterInit(JobPluginCollector jobPluginCollector) {
+        this.writerPluginName = this.configuration.getString(
+                CoreConstant.DATAX_JOB_CONTENT_WRITER_NAME);
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                PluginType.WRITER, this.writerPluginName));
+
+        Writer.Job jobWriter = (Writer.Job) LoadUtil.loadJobPlugin(
+                PluginType.WRITER, this.writerPluginName);
+
+        this.configuration.set(CoreConstant.DATAX_JOB_CONTENT_WRITER_PARAMETER + ".dryRun", true);
+
+        // Set the writer's jobConfig
+        jobWriter.setPluginJobConf(this.configuration.getConfiguration(
+                CoreConstant.DATAX_JOB_CONTENT_WRITER_PARAMETER));
+        // Set the writer's peer (reader) config
+        jobWriter.setPeerPluginJobConf(this.configuration.getConfiguration(
+                CoreConstant.DATAX_JOB_CONTENT_READER_PARAMETER));
+
+        jobWriter.setPeerPluginName(this.readerPluginName);
+        jobWriter.setJobPluginCollector(jobPluginCollector);
+
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+
+        return jobWriter;
+    }
+
+    private void preCheckReader() {
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                PluginType.READER, this.readerPluginName));
+        LOG.info(String.format("DataX Reader.Job [%s] do preCheck work .",
+                this.readerPluginName));
+        this.jobReader.preCheck();
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+    }
+
+    private void preCheckWriter() {
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                PluginType.WRITER, this.writerPluginName));
+        LOG.info(String.format("DataX Writer.Job [%s] do preCheck work .",
+                this.writerPluginName));
+        this.jobWriter.preCheck();
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+    }
+
+    /**
+     * Initializes the reader and the writer
+     */
+    private void init() {
+        this.jobId = this.configuration.getLong(
+                CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, -1);
+
+        if (this.jobId < 0) {
+            LOG.info("Set jobId = 0");
+            this.jobId = 0;
+            this.configuration.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID,
+                    this.jobId);
+        }
+
+        Thread.currentThread().setName("job-" + this.jobId);
+
+        JobPluginCollector jobPluginCollector = new DefaultJobPluginCollector(
+                this.getContainerCommunicator());
+        // The Reader must be initialized before the Writer
+        this.jobReader = this.initJobReader(jobPluginCollector);
+        this.jobWriter = this.initJobWriter(jobPluginCollector);
+    }
+
+    private void prepare() {
+        this.prepareJobReader();
+        this.prepareJobWriter();
+    }
+
+    private void preHandle() {
+        String handlerPluginTypeStr = this.configuration.getString(
+                CoreConstant.DATAX_JOB_PREHANDLER_PLUGINTYPE);
+        if (StrUtil.isEmpty(handlerPluginTypeStr)) {
+            return;
+        }
+        PluginType handlerPluginType;
+        try {
+            handlerPluginType = PluginType.valueOf(handlerPluginTypeStr.toUpperCase());
+        } catch (IllegalArgumentException e) {
+            throw DataXException.asDataXException(
+                    FrameworkErrorCode.CONFIG_ERROR,
+                    String.format("Job preHandler's pluginType(%s) set error, reason(%s)", handlerPluginTypeStr.toUpperCase(), e.getMessage()));
+        }
+
+        String handlerPluginName = this.configuration.getString(
+                CoreConstant.DATAX_JOB_PREHANDLER_PLUGINNAME);
+
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                handlerPluginType, handlerPluginName));
+
+        AbstractJobPlugin handler = LoadUtil.loadJobPlugin(
+                handlerPluginType, handlerPluginName);
+
+        JobPluginCollector jobPluginCollector = new DefaultJobPluginCollector(
+                this.getContainerCommunicator());
+        handler.setJobPluginCollector(jobPluginCollector);
+
+        // TODO: the safety of this configuration must be guaranteed in the future
+        handler.preHandler(configuration);
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+
+        LOG.info("After PreHandler: \n" + Engine.filterJobConfiguration(configuration) + "\n");
+    }
+
+    private void postHandle() {
+        String handlerPluginTypeStr = this.configuration.getString(
+                CoreConstant.DATAX_JOB_POSTHANDLER_PLUGINTYPE);
+
+        if (StrUtil.isEmpty(handlerPluginTypeStr)) {
+            return;
+        }
+        PluginType handlerPluginType;
+        try {
+            handlerPluginType = PluginType.valueOf(handlerPluginTypeStr.toUpperCase());
+        } catch (IllegalArgumentException e) {
+            throw DataXException.asDataXException(
+                    FrameworkErrorCode.CONFIG_ERROR,
+                    String.format("Job postHandler's pluginType(%s) set error, reason(%s)", handlerPluginTypeStr.toUpperCase(), e.getMessage()));
+        }
+
+        String handlerPluginName = this.configuration.getString(
+                CoreConstant.DATAX_JOB_POSTHANDLER_PLUGINNAME);
+
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                handlerPluginType, handlerPluginName));
+
+        AbstractJobPlugin handler = LoadUtil.loadJobPlugin(
+                handlerPluginType, handlerPluginName);
+
+        JobPluginCollector jobPluginCollector = new DefaultJobPluginCollector(
+                this.getContainerCommunicator());
+        handler.setJobPluginCollector(jobPluginCollector);
+
+        handler.postHandler(configuration);
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+    }
+
+
+    /**
+     * Performs the finest-grained split of the reader and the writer. Note that the writer's split must follow the
+     * reader's split so that the two counts are equal and the 1:1 channel model holds; the reader and writer
+     * configurations are then merged, and the merged result is shuffled to avoid long-tail effects from ordering
+     */
+    private int split() {
+        this.adjustChannelNumber();
+
+        if (this.needChannelNumber <= 0) {
+            this.needChannelNumber = 1;
+        }
+
+        List<Configuration> readerTaskConfigs = this
+                .doReaderSplit(this.needChannelNumber);
+        int taskNumber = readerTaskConfigs.size();
+        List<Configuration> writerTaskConfigs = this
+                .doWriterSplit(taskNumber);
+
+        List<Configuration> transformerList = this.configuration.getListConfiguration(CoreConstant.DATAX_JOB_CONTENT_TRANSFORMER);
+
+        LOG.debug("transformer configuration: "+ JsonUtils.toJsonString(transformerList));
+        /**
+         * Input: the reader and writer parameter lists; output: the list of elements under content
+         */
+        List<Configuration> contentConfig = mergeReaderAndWriterTaskConfigs(
+                readerTaskConfigs, writerTaskConfigs, transformerList);
+
+
+        LOG.debug("contentConfig configuration: "+ JsonUtils.toJsonString(contentConfig));
+
+        this.configuration.set(CoreConstant.DATAX_JOB_CONTENT, contentConfig);
+
+        return contentConfig.size();
+    }
+
+    private void adjustChannelNumber() {
+        int needChannelNumberByByte = Integer.MAX_VALUE;
+        int needChannelNumberByRecord = Integer.MAX_VALUE;
+
+        boolean isByteLimit = (this.configuration.getInt(
+                CoreConstant.DATAX_JOB_SETTING_SPEED_BYTE, 0) > 0);
+        if (isByteLimit) {
+            long globalLimitedByteSpeed = this.configuration.getInt(
+                    CoreConstant.DATAX_JOB_SETTING_SPEED_BYTE, 10 * 1024 * 1024);
+
+            // With a global byte-rate limit, the per-channel byte limit must be set, otherwise fail fast
+            Long channelLimitedByteSpeed = this.configuration
+                    .getLong(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE);
+            if (channelLimitedByteSpeed == null || channelLimitedByteSpeed <= 0) {
+                throw DataXException.asDataXException(
+                        FrameworkErrorCode.CONFIG_ERROR,
+                        "在有总bps限速条件下,单个channel的bps值不能为空,也不能为非正数");
+            }
+
+            needChannelNumberByByte =
+                    (int) (globalLimitedByteSpeed / channelLimitedByteSpeed);
+            needChannelNumberByByte =
+                    needChannelNumberByByte > 0 ? needChannelNumberByByte : 1;
+            LOG.info("Job set Max-Byte-Speed to " + globalLimitedByteSpeed + " bytes.");
+        }
+
+        boolean isRecordLimit = (this.configuration.getInt(
+                CoreConstant.DATAX_JOB_SETTING_SPEED_RECORD, 0)) > 0;
+        if (isRecordLimit) {
+            long globalLimitedRecordSpeed = this.configuration.getInt(
+                    CoreConstant.DATAX_JOB_SETTING_SPEED_RECORD, 100000);
+
+            Long channelLimitedRecordSpeed = this.configuration.getLong(
+                    CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD);
+            if (channelLimitedRecordSpeed == null || channelLimitedRecordSpeed <= 0) {
+                throw DataXException.asDataXException(FrameworkErrorCode.CONFIG_ERROR,
+                        "在有总tps限速条件下,单个channel的tps值不能为空,也不能为非正数");
+            }
+
+            needChannelNumberByRecord =
+                    (int) (globalLimitedRecordSpeed / channelLimitedRecordSpeed);
+            needChannelNumberByRecord =
+                    needChannelNumberByRecord > 0 ? needChannelNumberByRecord : 1;
+            LOG.info("Job set Max-Record-Speed to " + globalLimitedRecordSpeed + " records.");
+        }
+
+        // Take the smaller of the two
+        this.needChannelNumber = needChannelNumberByByte < needChannelNumberByRecord ?
+                needChannelNumberByByte : needChannelNumberByRecord;
+
+        // If needChannelNumber was derived from the byte or record limit, we are done
+        if (this.needChannelNumber < Integer.MAX_VALUE) {
+            return;
+        }
+
+        boolean isChannelLimit = (this.configuration.getInt(
+                CoreConstant.DATAX_JOB_SETTING_SPEED_CHANNEL, 0) > 0);
+        if (isChannelLimit) {
+            this.needChannelNumber = this.configuration.getInt(
+                    CoreConstant.DATAX_JOB_SETTING_SPEED_CHANNEL);
+
+            LOG.info("Job set Channel-Number to " + this.needChannelNumber
+                    + " channels.");
+
+            return;
+        }
+
+        throw DataXException.asDataXException(
+                FrameworkErrorCode.CONFIG_ERROR,
+                "Job运行速度必须设置");
+    }
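
A worked example of the channel arithmetic above, with made-up limits; the comments note the corresponding settings (key paths as in stock DataX, assumed unchanged here).

    public class ChannelNumberSketch {
        public static void main(String[] args) {
            long jobByteSpeed = 10L * 1024 * 1024;      // job.setting.speed.byte
            long channelByteSpeed = 1L * 1024 * 1024;   // core.transport.channel.speed.byte
            long jobRecordSpeed = 100_000;              // job.setting.speed.record
            long channelRecordSpeed = 20_000;           // core.transport.channel.speed.record

            int byByte = Math.max(1, (int) (jobByteSpeed / channelByteSpeed));       // 10
            int byRecord = Math.max(1, (int) (jobRecordSpeed / channelRecordSpeed)); // 5
            // adjustChannelNumber() keeps the smaller of the two; job.setting.speed.channel
            // is only consulted when neither the byte limit nor the record limit is configured.
            System.out.println("needChannelNumber = " + Math.min(byByte, byRecord)); // 5
        }
    }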
+
+    /**
+     * schedule() first merges the reader/writer split results from the previous step into concrete taskGroupContainers,
+     * then picks the scheduling strategy matching the execution mode and schedules all tasks
+     */
+    private void schedule() {
+        /**
+         * The global speed and the per-channel speed here are configured in B/s
+         */
+        int channelsPerTaskGroup = this.configuration.getInt(
+                CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL, 5);
+        int taskNumber = this.configuration.getList(
+                CoreConstant.DATAX_JOB_CONTENT).size();
+
+        this.needChannelNumber = Math.min(this.needChannelNumber, taskNumber);
+        PerfTrace.getInstance().setChannelNumber(needChannelNumber);
+
+        /**
+         * Determine from the configuration which tasks each taskGroup should run
+         */
+
+        List<Configuration> taskGroupConfigs = JobAssignUtil.assignFairly(this.configuration,
+                this.needChannelNumber, channelsPerTaskGroup);
+
+        LOG.info("Scheduler starts [{}] taskGroups.", taskGroupConfigs.size());
+
+        ExecuteMode executeMode = null;
+        AbstractScheduler scheduler;
+        try {
+        	executeMode = ExecuteMode.STANDALONE;
+            scheduler = initStandaloneScheduler(this.configuration);
+
+            // Set the executeMode
+            for (Configuration taskGroupConfig : taskGroupConfigs) {
+                taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_MODE, executeMode.getValue());
+            }
+
+            if (executeMode == ExecuteMode.LOCAL || executeMode == ExecuteMode.DISTRIBUTE) {
+                if (this.jobId <= 0) {
+                    throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+                            "在[ local | distribute ]模式下必须设置jobId,并且其值 > 0 .");
+                }
+            }
+
+            LOG.info("Running by {} Mode.", executeMode);
+
+            this.startTransferTimeStamp = System.currentTimeMillis();
+
+            scheduler.schedule(taskGroupConfigs);
+
+            this.endTransferTimeStamp = System.currentTimeMillis();
+        } catch (Exception e) {
+            LOG.error("运行scheduler 模式[{}]出错.", executeMode);
+            this.endTransferTimeStamp = System.currentTimeMillis();
+            throw DataXException.asDataXException(
+                    FrameworkErrorCode.RUNTIME_ERROR, e);
+        }
+
+        /**
+         * Check the task execution results against the error-record limits
+         */
+        this.checkLimit();
+    }
+
+
+    private AbstractScheduler initStandaloneScheduler(Configuration configuration) {
+        AbstractContainerCommunicator containerCommunicator = new StandAloneJobContainerCommunicator(configuration);
+        super.setContainerCommunicator(containerCommunicator);
+
+        return new StandAloneScheduler(containerCommunicator);
+    }
+
+    private void post() {
+        this.postJobWriter();
+        this.postJobReader();
+    }
+
+    private void destroy() {
+        if (this.jobWriter != null) {
+            this.jobWriter.destroy();
+            this.jobWriter = null;
+        }
+        if (this.jobReader != null) {
+            this.jobReader.destroy();
+            this.jobReader = null;
+        }
+    }
+
+    private void logStatistics() {
+        long totalCosts = (this.endTimeStamp - this.startTimeStamp) / 1000;
+        long transferCosts = (this.endTransferTimeStamp - this.startTransferTimeStamp) / 1000;
+        if (0L == transferCosts) {
+            transferCosts = 1L;
+        }
+
+        if (super.getContainerCommunicator() == null) {
+            return;
+        }
+
+        Communication communication = super.getContainerCommunicator().collect();
+        communication.setTimestamp(this.endTimeStamp);
+
+        Communication tempComm = new Communication();
+        tempComm.setTimestamp(this.startTransferTimeStamp);
+
+        Communication reportCommunication = CommunicationTool.getReportCommunication(communication, tempComm, this.totalStage);
+
+        // Byte rate
+        long byteSpeedPerSecond = communication.getLongCounter(CommunicationTool.READ_SUCCEED_BYTES)
+                / transferCosts;
+
+        long recordSpeedPerSecond = communication.getLongCounter(CommunicationTool.READ_SUCCEED_RECORDS)
+                / transferCosts;
+
+        reportCommunication.setLongCounter(CommunicationTool.BYTE_SPEED, byteSpeedPerSecond);
+        reportCommunication.setLongCounter(CommunicationTool.RECORD_SPEED, recordSpeedPerSecond);
+
+        super.getContainerCommunicator().report(reportCommunication);
+
+
+        LOG.info(String.format(
+                "\n" + "%-26s: %-18s\n" + "%-26s: %-18s\n" + "%-26s: %19s\n"
+                        + "%-26s: %19s\n" + "%-26s: %19s\n" + "%-26s: %19s\n"
+                        + "%-26s: %19s\n",
+                "任务启动时刻",
+                dateFormat.format(startTimeStamp),
+
+                "任务结束时刻",
+                dateFormat.format(endTimeStamp),
+
+                "任务总计耗时",
+                String.valueOf(totalCosts) + "s",
+                "任务平均流量",
+                cn.tr.plugin.dataX.common.util.StrUtil.stringify(byteSpeedPerSecond)
+                        + "/s",
+                "记录写入速度",
+                String.valueOf(recordSpeedPerSecond)
+                        + "rec/s", "读出记录总数",
+                String.valueOf(CommunicationTool.getTotalReadRecords(communication)),
+                "读写失败总数",
+                String.valueOf(CommunicationTool.getTotalErrorRecords(communication))
+        ));
+
+        if (communication.getLongCounter(CommunicationTool.TRANSFORMER_SUCCEED_RECORDS) > 0
+                || communication.getLongCounter(CommunicationTool.TRANSFORMER_FAILED_RECORDS) > 0
+                || communication.getLongCounter(CommunicationTool.TRANSFORMER_FILTER_RECORDS) > 0) {
+            LOG.info(String.format(
+                    "\n" + "%-26s: %19s\n" + "%-26s: %19s\n" + "%-26s: %19s\n",
+                    "Transformer成功记录总数",
+                    communication.getLongCounter(CommunicationTool.TRANSFORMER_SUCCEED_RECORDS),
+
+                    "Transformer失败记录总数",
+                    communication.getLongCounter(CommunicationTool.TRANSFORMER_FAILED_RECORDS),
+
+                    "Transformer过滤记录总数",
+                    communication.getLongCounter(CommunicationTool.TRANSFORMER_FILTER_RECORDS)
+            ));
+        }
+
+
+    }
+
+    /**
+     * Initializes the reader job and returns the Reader.Job instance
+     *
+     * @return
+     */
+    private Reader.Job initJobReader(
+            JobPluginCollector jobPluginCollector) {
+        this.readerPluginName = this.configuration.getString(
+                CoreConstant.DATAX_JOB_CONTENT_READER_NAME);
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                PluginType.READER, this.readerPluginName));
+
+        Reader.Job jobReader = (Reader.Job) LoadUtil.loadJobPlugin(
+                PluginType.READER, this.readerPluginName);
+
+        // Set the reader's jobConfig
+        jobReader.setPluginJobConf(this.configuration.getConfiguration(
+                CoreConstant.DATAX_JOB_CONTENT_READER_PARAMETER));
+
+        // Set the reader's peer (writer) config
+        jobReader.setPeerPluginJobConf(this.configuration.getConfiguration(
+                CoreConstant.DATAX_JOB_CONTENT_WRITER_PARAMETER));
+
+        jobReader.setJobPluginCollector(jobPluginCollector);
+        jobReader.init();
+
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+        return jobReader;
+    }
+
+    /**
+     * Initializes the writer job and returns the Writer.Job instance
+     *
+     * @return
+     */
+    private Writer.Job initJobWriter(
+            JobPluginCollector jobPluginCollector) {
+        this.writerPluginName = this.configuration.getString(
+                CoreConstant.DATAX_JOB_CONTENT_WRITER_NAME);
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                PluginType.WRITER, this.writerPluginName));
+
+        Writer.Job jobWriter = (Writer.Job) LoadUtil.loadJobPlugin(
+                PluginType.WRITER, this.writerPluginName);
+
+        // Set the writer's jobConfig
+        jobWriter.setPluginJobConf(this.configuration.getConfiguration(
+                CoreConstant.DATAX_JOB_CONTENT_WRITER_PARAMETER));
+
+        // Set the writer's peer (reader) config
+        jobWriter.setPeerPluginJobConf(this.configuration.getConfiguration(
+                CoreConstant.DATAX_JOB_CONTENT_READER_PARAMETER));
+
+        jobWriter.setPeerPluginName(this.readerPluginName);
+        jobWriter.setJobPluginCollector(jobPluginCollector);
+        jobWriter.init();
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+
+        return jobWriter;
+    }
+
+    private void prepareJobReader() {
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                PluginType.READER, this.readerPluginName));
+        LOG.info(String.format("DataX Reader.Job [%s] do prepare work .",
+                this.readerPluginName));
+        this.jobReader.prepare();
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+    }
+
+    private void prepareJobWriter() {
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                PluginType.WRITER, this.writerPluginName));
+        LOG.info(String.format("DataX Writer.Job [%s] do prepare work .",
+                this.writerPluginName));
+        this.jobWriter.prepare();
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+    }
+
+    // TODO: handle the case where the source is empty
+    private List<Configuration> doReaderSplit(int adviceNumber) {
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                PluginType.READER, this.readerPluginName));
+        List<Configuration> readerSlicesConfigs =
+                this.jobReader.split(adviceNumber);
+        if (readerSlicesConfigs == null || readerSlicesConfigs.size() <= 0) {
+            throw DataXException.asDataXException(
+                    FrameworkErrorCode.PLUGIN_SPLIT_ERROR,
+                    "reader切分的task数目不能小于等于0");
+        }
+        LOG.info("DataX Reader.Job [{}] splits to [{}] tasks.",
+                this.readerPluginName, readerSlicesConfigs.size());
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+        return readerSlicesConfigs;
+    }
+
+    private List<Configuration> doWriterSplit(int readerTaskNumber) {
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                PluginType.WRITER, this.writerPluginName));
+
+        List<Configuration> writerSlicesConfigs = this.jobWriter
+                .split(readerTaskNumber);
+        if (writerSlicesConfigs == null || writerSlicesConfigs.size() <= 0) {
+            throw DataXException.asDataXException(
+                    FrameworkErrorCode.PLUGIN_SPLIT_ERROR,
+                    "writer切分的task不能小于等于0");
+        }
+        LOG.info("DataX Writer.Job [{}] splits to [{}] tasks.",
+                this.writerPluginName, writerSlicesConfigs.size());
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+
+        return writerSlicesConfigs;
+    }
+
+    /**
+     * Merges the reader and writer configurations in order; the order must not be disturbed! Input: reader- and writer-level configs; output: complete per-task configs
+     *
+     * @param readerTasksConfigs
+     * @param writerTasksConfigs
+     * @return
+     */
+    private List<Configuration> mergeReaderAndWriterTaskConfigs(
+            List<Configuration> readerTasksConfigs,
+            List<Configuration> writerTasksConfigs) {
+        return mergeReaderAndWriterTaskConfigs(readerTasksConfigs, writerTasksConfigs, null);
+    }
+
+    private List<Configuration> mergeReaderAndWriterTaskConfigs(
+            List<Configuration> readerTasksConfigs,
+            List<Configuration> writerTasksConfigs,
+            List<Configuration> transformerConfigs) {
+        if (readerTasksConfigs.size() != writerTasksConfigs.size()) {
+            throw DataXException.asDataXException(
+                    FrameworkErrorCode.PLUGIN_SPLIT_ERROR,
+                    String.format("reader切分的task数目[%d]不等于writer切分的task数目[%d].",
+                            readerTasksConfigs.size(), writerTasksConfigs.size())
+            );
+        }
+
+        List<Configuration> contentConfigs = new ArrayList<Configuration>();
+        for (int i = 0; i < readerTasksConfigs.size(); i++) {
+            Configuration taskConfig = Configuration.newDefault();
+            taskConfig.set(CoreConstant.JOB_READER_NAME,
+                    this.readerPluginName);
+            taskConfig.set(CoreConstant.JOB_READER_PARAMETER,
+                    readerTasksConfigs.get(i));
+            taskConfig.set(CoreConstant.JOB_WRITER_NAME,
+                    this.writerPluginName);
+            taskConfig.set(CoreConstant.JOB_WRITER_PARAMETER,
+                    writerTasksConfigs.get(i));
+
+            if(transformerConfigs!=null && transformerConfigs.size()>0){
+                taskConfig.set(CoreConstant.JOB_TRANSFORMER, transformerConfigs);
+            }
+
+            taskConfig.set(CoreConstant.TASK_ID, i);
+            contentConfigs.add(taskConfig);
+        }
+
+        return contentConfigs;
+    }
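
The 1:1 pairing performed above can be pictured with plain maps standing in for Configuration (illustration only; the keys "reader", "writer" and "taskId" merely mirror the JOB_READER_PARAMETER, JOB_WRITER_PARAMETER and TASK_ID paths).

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class MergeTaskConfigSketch {
        public static void main(String[] args) {
            List<Map<String, Object>> readerSlices = Arrays.asList(
                    Collections.singletonMap("where", (Object) "id < 100"),
                    Collections.singletonMap("where", (Object) "id >= 100"));
            List<Map<String, Object>> writerSlices = Arrays.asList(
                    Collections.singletonMap("table", (Object) "t_target"),
                    Collections.singletonMap("table", (Object) "t_target"));

            if (readerSlices.size() != writerSlices.size()) {
                throw new IllegalStateException("reader/writer split sizes differ");
            }
            List<Map<String, Object>> content = new ArrayList<>();
            for (int i = 0; i < readerSlices.size(); i++) {
                Map<String, Object> task = new LinkedHashMap<>();
                task.put("reader", readerSlices.get(i)); // JOB_READER_PARAMETER
                task.put("writer", writerSlices.get(i)); // JOB_WRITER_PARAMETER
                task.put("taskId", i);                   // TASK_ID
                content.add(task);
            }
            System.out.println(content); // two task entries, paired 1:1 by index
        }
    }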
+
+    /**
+     * This part is fairly involved; the merge happens in two steps: 1. tasks into channels, 2. channels into taskGroups.
+     * Taken together it simply packs the tasks into taskGroups so that the computed channel count is met without starting extra channels.
+     * <p/>
+     * example:
+     * <p/>
+     * Precondition: the split produced 1024 table shards, the user requested a total rate of 1000M/s with 3M/s per channel,
+     * and each taskGroup runs 7 channels.
+     * <p/>
+     * Computation: total channel count = 1000M/s / 3M/s =
+     * 333; spreading evenly, 308 channels carry 3 tasks each and 25 channels carry 4 tasks each;
+     * required taskGroup count = 333 / 7 =
+     * 47 remainder 4, i.e. 48 taskGroups: 47 of them handle 7 channels each and 1 handles the remaining 4 channels.
+     * <p/>
+     * Handling: the taskGroup responsible for the 4 leftover channels is dealt with first:
+     * pick 4 channels with the average of 3 tasks each and give that group taskGroupId 0,
+     * then deal the remaining tasks round-robin, like dealing cards, into the taskGroups holding the average channel count.
+     * <p/>
+     * TODO delete it
+     *
+     * @param averTaskPerChannel
+     * @param channelNumber
+     * @param channelsPerTaskGroup
+     * @return the complete, independent configuration of each taskGroup
+     */
+    @SuppressWarnings("serial")
+    private List<Configuration> distributeTasksToTaskGroup(
+            int averTaskPerChannel, int channelNumber,
+            int channelsPerTaskGroup) {
+        Validate.isTrue(averTaskPerChannel > 0 && channelNumber > 0
+                        && channelsPerTaskGroup > 0,
+                "每个channel的平均task数[averTaskPerChannel],channel数目[channelNumber],每个taskGroup的平均channel数[channelsPerTaskGroup]都应该为正数");
+        List<Configuration> taskConfigs = this.configuration
+                .getListConfiguration(CoreConstant.DATAX_JOB_CONTENT);
+        int taskGroupNumber = channelNumber / channelsPerTaskGroup;
+        int leftChannelNumber = channelNumber % channelsPerTaskGroup;
+        if (leftChannelNumber > 0) {
+            taskGroupNumber += 1;
+        }
+
+        /**
+         * If there is only one taskGroup, mark it and return directly.
+         */
+        if (taskGroupNumber == 1) {
+            final Configuration taskGroupConfig = this.configuration.clone();
+            /**
+             * Configuration.clone() does not carry everything that is needed here,
+             * so the job content list is set again explicitly.
+             */
+            taskGroupConfig.set(CoreConstant.DATAX_JOB_CONTENT, this.configuration
+                    .getListConfiguration(CoreConstant.DATAX_JOB_CONTENT));
+            taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL,
+                    channelNumber);
+            taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID, 0);
+            return new ArrayList<Configuration>() {
+                {
+                    add(taskGroupConfig);
+                }
+            };
+        }
+
+        List<Configuration> taskGroupConfigs = new ArrayList<Configuration>();
+        /**
+         * Clear the content configuration of every taskGroup.
+         */
+        for (int i = 0; i < taskGroupNumber; i++) {
+            Configuration taskGroupConfig = this.configuration.clone();
+            List<Configuration> taskGroupJobContent = taskGroupConfig
+                    .getListConfiguration(CoreConstant.DATAX_JOB_CONTENT);
+            taskGroupJobContent.clear();
+            taskGroupConfig.set(CoreConstant.DATAX_JOB_CONTENT, taskGroupJobContent);
+
+            taskGroupConfigs.add(taskGroupConfig);
+        }
+
+        int taskConfigIndex = 0;
+        int channelIndex = 0;
+        int taskGroupConfigIndex = 0;
+
+        /**
+         * Handle the taskGroup whose channel count differs from the average first.
+         */
+        if (leftChannelNumber > 0) {
+            Configuration taskGroupConfig = taskGroupConfigs.get(taskGroupConfigIndex);
+            for (; channelIndex < leftChannelNumber; channelIndex++) {
+                for (int i = 0; i < averTaskPerChannel; i++) {
+                    List<Configuration> taskGroupJobContent = taskGroupConfig
+                            .getListConfiguration(CoreConstant.DATAX_JOB_CONTENT);
+                    taskGroupJobContent.add(taskConfigs.get(taskConfigIndex++));
+                    taskGroupConfig.set(CoreConstant.DATAX_JOB_CONTENT,
+                            taskGroupJobContent);
+                }
+            }
+
+            taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL,
+                    leftChannelNumber);
+            taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID,
+                    taskGroupConfigIndex++);
+        }
+
+        /**
+         * The rest is assigned round-robin, then marked with the channel count and the taskGroupId.
+         */
+        int equalDivisionStartIndex = taskGroupConfigIndex;
+        for (; taskConfigIndex < taskConfigs.size()
+                && equalDivisionStartIndex < taskGroupConfigs.size(); ) {
+            for (taskGroupConfigIndex = equalDivisionStartIndex; taskGroupConfigIndex < taskGroupConfigs
+                    .size() && taskConfigIndex < taskConfigs.size(); taskGroupConfigIndex++) {
+                Configuration taskGroupConfig = taskGroupConfigs.get(taskGroupConfigIndex);
+                List<Configuration> taskGroupJobContent = taskGroupConfig
+                        .getListConfiguration(CoreConstant.DATAX_JOB_CONTENT);
+                taskGroupJobContent.add(taskConfigs.get(taskConfigIndex++));
+                taskGroupConfig.set(
+                        CoreConstant.DATAX_JOB_CONTENT, taskGroupJobContent);
+            }
+        }
+
+        for (taskGroupConfigIndex = equalDivisionStartIndex;
+             taskGroupConfigIndex < taskGroupConfigs.size(); ) {
+            Configuration taskGroupConfig = taskGroupConfigs.get(taskGroupConfigIndex);
+            taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL,
+                    channelsPerTaskGroup);
+            taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID,
+                    taskGroupConfigIndex++);
+        }
+
+        return taskGroupConfigs;
+    }
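
A worked version of the arithmetic described in the comment above, using the numbers assumed there (1024 sliced tasks, a 1000M/s total rate at 3M/s per channel, 7 channels per task group); this is a throwaway calculation for illustration, not code the framework runs:

public class TaskGroupSplitExample {
    public static void main(String[] args) {
        int taskCount = 1024;               // sliced tasks from the example
        int channelNumber = 1000 / 3;       // 333 channels: 1000M/s total at roughly 3M/s each
        int channelsPerTaskGroup = 7;

        int taskGroupNumber = channelNumber / channelsPerTaskGroup;   // 47
        int leftChannelNumber = channelNumber % channelsPerTaskGroup; // 4
        if (leftChannelNumber > 0) {
            taskGroupNumber += 1;                                     // 48 task groups in total
        }

        int baseTasksPerChannel = taskCount / channelNumber;          // 3
        int channelsWithOneExtraTask = taskCount % channelNumber;     // 25

        System.out.printf("%d task groups: %d with %d channels, 1 with %d channels%n",
                taskGroupNumber, taskGroupNumber - 1, channelsPerTaskGroup, leftChannelNumber);
        System.out.printf("%d channels carry %d tasks, %d channels carry %d tasks%n",
                channelNumber - channelsWithOneExtraTask, baseTasksPerChannel,
                channelsWithOneExtraTask, baseTasksPerChannel + 1);
    }
}
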
+
+    private void postJobReader() {
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                PluginType.READER, this.readerPluginName));
+        LOG.info("DataX Reader.Job [{}] do post work.",
+                this.readerPluginName);
+        this.jobReader.post();
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+    }
+
+    private void postJobWriter() {
+        classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader(
+                PluginType.WRITER, this.writerPluginName));
+        LOG.info("DataX Writer.Job [{}] do post work.",
+                this.writerPluginName);
+        this.jobWriter.post();
+        classLoaderSwapper.restoreCurrentThreadClassLoader();
+    }
+
+    /**
+     * Checks whether the final result exceeds the configured error limit: a limit below 1 is read as a
+     * percentage threshold, a limit above 1 as an absolute record-count threshold.
+     * (A standalone sketch of this rule follows the method below.)
+     */
+    private void checkLimit() {
+        Communication communication = super.getContainerCommunicator().collect();
+        errorLimit.checkRecordLimit(communication);
+        errorLimit.checkPercentageLimit(communication);
+    }
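
ErrorRecordChecker's implementation is not shown in this hunk, so the rule described above can only be sketched: a limit below 1 acts as a maximum error ratio, a limit of 1 or more as a maximum absolute number of error records. The names and numbers below are illustrative only:

public class ErrorLimitSketch {
    // limit < 1  -> maximum allowed error ratio (errorRecords / totalRecords)
    // limit >= 1 -> maximum allowed absolute number of error records
    static boolean exceedsLimit(double limit, long errorRecords, long totalRecords) {
        if (limit < 1.0) {
            return totalRecords > 0 && (errorRecords / (double) totalRecords) > limit;
        }
        return errorRecords > (long) limit;
    }

    public static void main(String[] args) {
        System.out.println(exceedsLimit(0.01, 5, 100)); // true: 5% of records failed, limit was 1%
        System.out.println(exceedsLimit(10, 5, 100));   // false: 5 error records, limit was 10
    }
}
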
+
+    /**
+     * Invoke external hooks.
+     */
+    private void invokeHooks() {
+        Communication comm = super.getContainerCommunicator().collect();
+        HookInvoker invoker = new HookInvoker(CoreConstant.DATAX_HOME + "/hook", configuration, comm.getCounter());
+        invoker.invokeAll();
+    }
+}

+ 22 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/job/meta/ExecuteMode.java

@@ -0,0 +1,22 @@
+package cn.tr.plugin.dataX.core.job.meta;
+
+/**
+ * Created by liupeng on 15/12/21.
+ */
+public enum ExecuteMode {
+    STANDALONE("standalone");
+
+    String value;
+
+    private ExecuteMode(String value) {
+        this.value = value;
+    }
+
+    public String value() {
+        return this.value;
+    }
+
+    public String getValue() {
+        return this.value;
+    }
+}

+ 32 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/job/meta/State.java

@@ -0,0 +1,32 @@
+package cn.tr.plugin.dataX.core.job.meta;
+
+/**
+ * Created by liupeng on 15/12/21.
+ */
+public enum State {
+    SUBMITTING(10),
+    WAITING(20),
+    RUNNING(30),
+    KILLING(40),
+    KILLED(50),
+    FAILED(60),
+    SUCCEEDED(70);
+
+    int value;
+
+    private State(int value) {
+        this.value = value;
+    }
+
+    public int value() {
+        return this.value;
+    }
+
+    public boolean isFinished() {
+        return this == KILLED || this == FAILED || this == SUCCEEDED;
+    }
+
+    public boolean isRunning() {
+        return !this.isFinished();
+    }
+}
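
A quick illustration of the lifecycle helpers above; note that KILLING is deliberately not a terminal state, so isRunning() still reports true for it:

import cn.tr.plugin.dataX.core.job.meta.State;

public class StateDemo {
    public static void main(String[] args) {
        System.out.println(State.KILLING.isRunning());   // true: only KILLED, FAILED, SUCCEEDED are terminal
        System.out.println(State.KILLED.isFinished());   // true
        System.out.println(State.SUCCEEDED.isRunning()); // false
    }
}
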

+ 135 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/job/scheduler/AbstractScheduler.java

@@ -0,0 +1,135 @@
+package cn.tr.plugin.dataX.core.job.scheduler;
+
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.job.meta.State;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;
+import cn.tr.plugin.dataX.core.statistics.container.communicator.AbstractContainerCommunicator;
+import cn.tr.plugin.dataX.core.util.ErrorRecordChecker;
+import cn.tr.plugin.dataX.core.util.Validate;
+import cn.tr.plugin.dataX.core.util.container.CoreConstant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+public abstract class AbstractScheduler {
+    private static final Logger LOG = LoggerFactory
+            .getLogger(AbstractScheduler.class);
+
+    private ErrorRecordChecker errorLimit;
+
+    private AbstractContainerCommunicator containerCommunicator;
+
+    private Long jobId;
+
+    public Long getJobId() {
+        return jobId;
+    }
+
+    public AbstractScheduler(AbstractContainerCommunicator containerCommunicator) {
+        this.containerCommunicator = containerCommunicator;
+    }
+
+    public void schedule(List<Configuration> configurations) {
+        Validate.notNull(configurations,
+                "scheduler配置不能为空");
+        int jobReportIntervalInMillSec = configurations.get(0).getInt(
+                CoreConstant.DATAX_CORE_CONTAINER_JOB_REPORTINTERVAL, 30000);
+        int jobSleepIntervalInMillSec = configurations.get(0).getInt(
+                CoreConstant.DATAX_CORE_CONTAINER_JOB_SLEEPINTERVAL, 10000);
+
+        this.jobId = configurations.get(0).getLong(
+                CoreConstant.DATAX_CORE_CONTAINER_JOB_ID);
+
+        errorLimit = new ErrorRecordChecker(configurations.get(0));
+
+        /**
+         * Register a Communication for each taskGroupContainer.
+         */
+        this.containerCommunicator.registerCommunication(configurations);
+
+        int totalTasks = calculateTaskCount(configurations);
+        startAllTaskGroup(configurations);
+
+        Communication lastJobContainerCommunication = new Communication();
+
+        long lastReportTimeStamp = System.currentTimeMillis();
+        try {
+            while (true) {
+                /**
+                 * step 1: collect job stats
+                 * step 2: build the report info, then report it
+                 * step 3: let errorLimit run its checks
+                 * step 4: dealSucceedStat();
+                 * step 5: dealKillingStat();
+                 * step 6: dealFailedStat();
+                 * step 7: refresh the last job stats, then sleep until the next iteration
+                 *
+                 * Some of the steps above should report info to DS.
+                 */
+                Communication nowJobContainerCommunication = this.containerCommunicator.collect();
+                nowJobContainerCommunication.setTimestamp(System.currentTimeMillis());
+                LOG.debug(nowJobContainerCommunication.toString());
+
+                // reporting interval
+                long now = System.currentTimeMillis();
+                if (now - lastReportTimeStamp > jobReportIntervalInMillSec) {
+                    Communication reportCommunication = CommunicationTool
+                            .getReportCommunication(nowJobContainerCommunication, lastJobContainerCommunication, totalTasks);
+
+                    this.containerCommunicator.report(reportCommunication);
+                    lastReportTimeStamp = now;
+                    lastJobContainerCommunication = nowJobContainerCommunication;
+                }
+
+                errorLimit.checkRecordLimit(nowJobContainerCommunication);
+
+                if (nowJobContainerCommunication.getState() == State.SUCCEEDED) {
+                    LOG.info("Scheduler accomplished all tasks.");
+                    break;
+                }
+
+                if (isJobKilling(this.getJobId())) {
+                    dealKillingStat(this.containerCommunicator, totalTasks);
+                } else if (nowJobContainerCommunication.getState() == State.FAILED) {
+                    dealFailedStat(this.containerCommunicator, nowJobContainerCommunication.getThrowable());
+                }
+
+                Thread.sleep(jobSleepIntervalInMillSec);
+            }
+        } catch (InterruptedException e) {
+            // exit with a failed status
+            LOG.error("捕获到InterruptedException异常!", e);
+
+            throw DataXException.asDataXException(
+                    TRExcCode.RUNTIME_ERROR, e);
+        }
+
+    }
+
+    protected abstract void startAllTaskGroup(List<Configuration> configurations);
+
+    protected abstract void dealFailedStat(AbstractContainerCommunicator frameworkCollector, Throwable throwable);
+
+    protected abstract void dealKillingStat(AbstractContainerCommunicator frameworkCollector, int totalTasks);
+
+    private int calculateTaskCount(List<Configuration> configurations) {
+        int totalTasks = 0;
+        for (Configuration taskGroupConfiguration : configurations) {
+            totalTasks += taskGroupConfiguration.getListConfiguration(
+                    CoreConstant.DATAX_JOB_CONTENT).size();
+        }
+        return totalTasks;
+    }
+
+//    private boolean isJobKilling(Long jobId) {
+//        Result<Integer> jobInfo = DataxServiceUtil.getJobInfo(jobId);
+//        return jobInfo.getData() == State.KILLING.value();
+//    }
+
+    protected abstract boolean isJobKilling(Long jobId);
+}

+ 60 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/job/scheduler/processinner/ProcessInnerScheduler.java

@@ -0,0 +1,60 @@
+package cn.tr.plugin.dataX.core.job.scheduler.processinner;
+
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.job.scheduler.AbstractScheduler;
+import cn.tr.plugin.dataX.core.statistics.container.communicator.AbstractContainerCommunicator;
+import cn.tr.plugin.dataX.core.taskgroup.TaskGroupContainer;
+import cn.tr.plugin.dataX.core.taskgroup.runner.TaskGroupContainerRunner;
+import cn.tr.plugin.dataX.core.util.FrameworkErrorCode;
+
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+public abstract class ProcessInnerScheduler extends AbstractScheduler {
+
+    private ExecutorService taskGroupContainerExecutorService;
+
+    public ProcessInnerScheduler(AbstractContainerCommunicator containerCommunicator) {
+        super(containerCommunicator);
+    }
+
+    @Override
+    public void startAllTaskGroup(List<Configuration> configurations) {
+        this.taskGroupContainerExecutorService = Executors
+                .newFixedThreadPool(configurations.size());
+
+        for (Configuration taskGroupConfiguration : configurations) {
+            TaskGroupContainerRunner taskGroupContainerRunner = newTaskGroupContainerRunner(taskGroupConfiguration);
+            this.taskGroupContainerExecutorService.execute(taskGroupContainerRunner);
+        }
+
+        this.taskGroupContainerExecutorService.shutdown();
+    }
+
+    @Override
+    public void dealFailedStat(AbstractContainerCommunicator frameworkCollector, Throwable throwable) {
+        this.taskGroupContainerExecutorService.shutdownNow();
+        throw DataXException.asDataXException(
+                FrameworkErrorCode.PLUGIN_RUNTIME_ERROR, throwable);
+    }
+
+
+    @Override
+    public void dealKillingStat(AbstractContainerCommunicator frameworkCollector, int totalTasks) {
+        // the killed state is signalled through the process exit code
+        this.taskGroupContainerExecutorService.shutdownNow();
+        throw DataXException.asDataXException(FrameworkErrorCode.KILLED_EXIT_VALUE,
+                "job killed status");
+    }
+
+
+    private TaskGroupContainerRunner newTaskGroupContainerRunner(
+            Configuration configuration) {
+        TaskGroupContainer taskGroupContainer = new TaskGroupContainer(configuration);
+
+        return new TaskGroupContainerRunner(taskGroupContainer);
+    }
+
+}

+ 19 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/job/scheduler/processinner/StandAloneScheduler.java

@@ -0,0 +1,19 @@
+package cn.tr.plugin.dataX.core.job.scheduler.processinner;
+
+import cn.tr.plugin.dataX.core.statistics.container.communicator.AbstractContainerCommunicator;
+
+/**
+ * Created by hongjiao.hj on 2014/12/22.
+ */
+public class StandAloneScheduler extends ProcessInnerScheduler{
+
+    public StandAloneScheduler(AbstractContainerCommunicator containerCommunicator) {
+        super(containerCommunicator);
+    }
+
+    @Override
+    protected boolean isJobKilling(Long jobId) {
+        return false;
+    }
+
+}
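
How these pieces are typically wired together, sketched under the assumption that the job Configuration and the split per-task-group Configurations have already been prepared (the actual call site would be the JobContainer's scheduling step, which is not shown in this hunk; StandAloneJobContainerCommunicator is added further down in this commit):

import cn.tr.plugin.dataX.common.util.Configuration;
import cn.tr.plugin.dataX.core.job.scheduler.processinner.StandAloneScheduler;
import cn.tr.plugin.dataX.core.statistics.container.communicator.job.StandAloneJobContainerCommunicator;

import java.util.List;

public class SchedulerWiringSketch {
    static void run(Configuration jobConfiguration, List<Configuration> taskGroupConfigurations) {
        StandAloneJobContainerCommunicator communicator =
                new StandAloneJobContainerCommunicator(jobConfiguration);
        StandAloneScheduler scheduler = new StandAloneScheduler(communicator);
        // registers the task-group communications, starts one thread per task group,
        // then polls, reports and enforces the error limit until every task group finishes
        scheduler.schedule(taskGroupConfigurations);
    }
}
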

+ 281 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/communication/Communication.java

@@ -0,0 +1,281 @@
+package cn.tr.plugin.dataX.core.statistics.communication;
+
+import cn.hutool.core.util.StrUtil;
+import cn.tr.plugin.dataX.common.base.BaseObject;
+import cn.tr.plugin.dataX.core.job.meta.State;
+import cn.tr.plugin.dataX.core.util.Validate;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Exchange class for all DataX state and statistics; reporting from jobs, taskGroups and tasks all goes through this class.
+ */
+public class Communication extends BaseObject implements Cloneable {
+    /**
+     * All numeric key-value counters
+     */
+    private Map<String, Number> counter;
+
+    /**
+     * Running state
+     */
+    private State state;
+
+    /**
+     * Recorded throwable
+     */
+    private Throwable throwable;
+
+    /**
+     * Timestamp of this record
+     */
+    private long timestamp;
+
+    /**
+     * Messages passed from tasks to the job
+     */
+    Map<String, List<String>> message;
+
+    public Communication() {
+        this.init();
+    }
+
+    public synchronized void reset() {
+        this.init();
+    }
+
+    private void init() {
+        this.counter = new ConcurrentHashMap<String, Number>();
+        this.state = State.RUNNING;
+        this.throwable = null;
+        this.message = new ConcurrentHashMap<String, List<String>>();
+        this.timestamp = System.currentTimeMillis();
+    }
+
+    public Map<String, Number> getCounter() {
+        return this.counter;
+    }
+
+    public State getState() {
+        return this.state;
+    }
+
+    public synchronized void setState(State state, boolean isForce) {
+        if (!isForce && this.state.equals(State.FAILED)) {
+            return;
+        }
+
+        this.state = state;
+    }
+
+    public synchronized void setState(State state) {
+        setState(state, false);
+    }
+
+    public Throwable getThrowable() {
+        return this.throwable;
+    }
+
+    public synchronized String getThrowableMessage() {
+        return this.throwable == null ? "" : this.throwable.getMessage();
+    }
+
+    public void setThrowable(Throwable throwable) {
+        setThrowable(throwable, false);
+    }
+
+    public synchronized void setThrowable(Throwable throwable, boolean isForce) {
+        if (isForce) {
+            this.throwable = throwable;
+        } else {
+            this.throwable = this.throwable == null ? throwable : this.throwable;
+        }
+    }
+
+    public long getTimestamp() {
+        return this.timestamp;
+    }
+
+    public void setTimestamp(long timestamp) {
+        this.timestamp = timestamp;
+    }
+
+    public Map<String, List<String>> getMessage() {
+        return this.message;
+    }
+
+    public List<String> getMessage(final String key) {
+        return message.get(key);
+    }
+
+    public synchronized void addMessage(final String key, final String value) {
+        Validate.isTrue(StrUtil.isNotBlank(key), "增加message的key不能为空");
+        List<String> valueList = this.message.get(key);
+        if (null == valueList) {
+            valueList = new ArrayList<String>();
+            this.message.put(key, valueList);
+        }
+
+        valueList.add(value);
+    }
+
+    public synchronized Long getLongCounter(final String key) {
+        Number value = this.counter.get(key);
+
+        return value == null ? 0 : value.longValue();
+    }
+
+    public synchronized void setLongCounter(final String key, final long value) {
+        Validate.isTrue(StrUtil.isNotBlank(key), "设置counter的key不能为空");
+        this.counter.put(key, value);
+    }
+
+    public synchronized Double getDoubleCounter(final String key) {
+        Number value = this.counter.get(key);
+
+        return value == null ? 0.0d : value.doubleValue();
+    }
+
+    public synchronized void setDoubleCounter(final String key, final double value) {
+        Validate.isTrue(StrUtil.isNotBlank(key), "设置counter的key不能为空");
+        this.counter.put(key, value);
+    }
+
+    public synchronized void increaseCounter(final String key, final long deltaValue) {
+        Validate.isTrue(StrUtil.isNotBlank(key), "增加counter的key不能为空");
+
+        long value = this.getLongCounter(key);
+
+        this.counter.put(key, value + deltaValue);
+    }
+
+    @Override
+    public Communication clone() {
+        Communication communication = new Communication();
+
+        /**
+         * clone counter
+         */
+        if (this.counter != null) {
+            for (Map.Entry<String, Number> entry : this.counter.entrySet()) {
+                String key = entry.getKey();
+                Number value = entry.getValue();
+                if (value instanceof Long) {
+                    communication.setLongCounter(key, (Long) value);
+                } else if (value instanceof Double) {
+                    communication.setDoubleCounter(key, (Double) value);
+                }
+            }
+        }
+
+        communication.setState(this.state, true);
+        communication.setThrowable(this.throwable, true);
+        communication.setTimestamp(this.timestamp);
+
+        /**
+         * clone message
+         */
+        if (this.message != null) {
+            for (final Map.Entry<String, List<String>> entry : this.message.entrySet()) {
+                String key = entry.getKey();
+                List value = new ArrayList() {{
+                    addAll(entry.getValue());
+                }};
+                communication.getMessage().put(key, value);
+            }
+        }
+
+        return communication;
+    }
+
+    public synchronized Communication mergeFrom(final Communication otherComm) {
+        if (otherComm == null) {
+            return this;
+        }
+
+        /**
+         * Merge counters: values from otherComm are added onto this one; keys that do not exist yet are created.
+         * Longs are added as longs, anything else as doubles.
+         */
+        for (Entry<String, Number> entry : otherComm.getCounter().entrySet()) {
+            String key = entry.getKey();
+            Number otherValue = entry.getValue();
+            if (otherValue == null) {
+                continue;
+            }
+
+            Number value = this.counter.get(key);
+            if (value == null) {
+                value = otherValue;
+            } else {
+                if (value instanceof Long && otherValue instanceof Long) {
+                    value = value.longValue() + otherValue.longValue();
+                } else {
+                    value = value.doubleValue() + otherValue.doubleValue();
+                }
+            }
+
+            this.counter.put(key, value);
+        }
+
+        // merge the state
+        mergeStateFrom(otherComm);
+
+        /**
+         * Merge the throwable: otherComm's throwable is only taken over
+         * when this communication has none yet.
+         */
+        this.throwable = this.throwable == null ? otherComm.getThrowable() : this.throwable;
+
+        /**
+         * The timestamp is the timestamp of the merge round as a whole; it is not merged between pairs of communications.
+         */
+
+        /**
+         * Messages are merged by union, i.e. everything is accumulated together.
+         */
+        for (Entry<String, List<String>> entry : otherComm.getMessage().entrySet()) {
+            String key = entry.getKey();
+            List<String> valueList = this.message.get(key);
+            if (valueList == null) {
+                valueList = new ArrayList<String>();
+                this.message.put(key, valueList);
+            }
+
+            valueList.addAll(entry.getValue());
+        }
+
+        return this;
+    }
+
+    /**
+     * Merge the state; priority: (Failed | Killed) > Running > Success.
+     * The Killing state never shows up here: killing only exists on the Job's own state.
+     */
+    public synchronized State mergeStateFrom(final Communication otherComm) {
+        State retState = this.getState();
+        if (otherComm == null) {
+            return retState;
+        }
+
+        if (this.state == State.FAILED || otherComm.getState() == State.FAILED
+                || this.state == State.KILLED || otherComm.getState() == State.KILLED) {
+            retState = State.FAILED;
+        } else if (this.state.isRunning() || otherComm.state.isRunning()) {
+            retState = State.RUNNING;
+        }
+
+        this.setState(retState);
+        return retState;
+    }
+
+    public synchronized boolean isFinished() {
+        return this.state == State.SUCCEEDED || this.state == State.FAILED
+                || this.state == State.KILLED;
+    }
+
+}
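
A small usage sketch of the merge rules above: counters are summed key by key, the higher-priority state wins (FAILED/KILLED over RUNNING over SUCCEEDED), and only the first throwable is kept. The counter key and the exception message are made up for the example:

import cn.tr.plugin.dataX.core.job.meta.State;
import cn.tr.plugin.dataX.core.statistics.communication.Communication;

public class CommunicationMergeDemo {
    public static void main(String[] args) {
        Communication a = new Communication();
        a.setLongCounter("readSucceedRecords", 100);
        a.setState(State.SUCCEEDED);

        Communication b = new Communication();
        b.setLongCounter("readSucceedRecords", 50);
        b.setState(State.FAILED);
        b.setThrowable(new RuntimeException("writer lost connection"));

        a.mergeFrom(b);
        System.out.println(a.getLongCounter("readSucceedRecords")); // 150
        System.out.println(a.getState());                           // FAILED
        System.out.println(a.getThrowableMessage());                // writer lost connection
    }
}
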

+ 285 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/communication/CommunicationTool.java

@@ -0,0 +1,285 @@
+package cn.tr.plugin.dataX.core.statistics.communication;
+
+
+import cn.tr.core.utils.JsonUtils;
+import cn.tr.plugin.dataX.common.statistics.PerfTrace;
+import cn.tr.plugin.dataX.common.util.StrUtil;
+import cn.tr.plugin.dataX.core.util.Validate;
+
+import java.text.DecimalFormat;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Business-level handling of the communication statistics lives here.
+ */
+public final class CommunicationTool {
+    public static final String STAGE = "stage";
+    public static final String BYTE_SPEED = "byteSpeed";
+    public static final String RECORD_SPEED = "recordSpeed";
+    public static final String PERCENTAGE = "percentage";
+
+    public static final String READ_SUCCEED_RECORDS = "readSucceedRecords";
+    public static final String READ_SUCCEED_BYTES = "readSucceedBytes";
+
+    public static final String READ_FAILED_RECORDS = "readFailedRecords";
+    public static final String READ_FAILED_BYTES = "readFailedBytes";
+
+    public static final String WRITE_RECEIVED_RECORDS = "writeReceivedRecords";
+    public static final String WRITE_RECEIVED_BYTES = "writeReceivedBytes";
+
+    public static final String WRITE_FAILED_RECORDS = "writeFailedRecords";
+    public static final String WRITE_FAILED_BYTES = "writeFailedBytes";
+
+    public static final String TOTAL_READ_RECORDS = "totalReadRecords";
+    private static final String TOTAL_READ_BYTES = "totalReadBytes";
+
+    private static final String TOTAL_ERROR_RECORDS = "totalErrorRecords";
+    private static final String TOTAL_ERROR_BYTES = "totalErrorBytes";
+
+    private static final String WRITE_SUCCEED_RECORDS = "writeSucceedRecords";
+    private static final String WRITE_SUCCEED_BYTES = "writeSucceedBytes";
+
+    public static final String WAIT_WRITER_TIME = "waitWriterTime";
+
+    public static final String WAIT_READER_TIME = "waitReaderTime";
+
+    public static final String TRANSFORMER_USED_TIME = "totalTransformerUsedTime";
+    public static final String TRANSFORMER_SUCCEED_RECORDS = "totalTransformerSuccessRecords";
+    public static final String TRANSFORMER_FAILED_RECORDS = "totalTransformerFailedRecords";
+    public static final String TRANSFORMER_FILTER_RECORDS = "totalTransformerFilterRecords";
+    public static final String TRANSFORMER_NAME_PREFIX = "usedTimeByTransformer_";
+
+    public static Communication getReportCommunication(Communication now, Communication old, int totalStage) {
+        Validate.isTrue(now != null && old != null,
+                "为汇报准备的新旧metric不能为null");
+
+        long totalReadRecords = getTotalReadRecords(now);
+        long totalReadBytes = getTotalReadBytes(now);
+        now.setLongCounter(TOTAL_READ_RECORDS, totalReadRecords);
+        now.setLongCounter(TOTAL_READ_BYTES, totalReadBytes);
+        now.setLongCounter(TOTAL_ERROR_RECORDS, getTotalErrorRecords(now));
+        now.setLongCounter(TOTAL_ERROR_BYTES, getTotalErrorBytes(now));
+        now.setLongCounter(WRITE_SUCCEED_RECORDS, getWriteSucceedRecords(now));
+        now.setLongCounter(WRITE_SUCCEED_BYTES, getWriteSucceedBytes(now));
+
+        long timeInterval = now.getTimestamp() - old.getTimestamp();
+        long sec = timeInterval <= 1000 ? 1 : timeInterval / 1000;
+        long bytesSpeed = (totalReadBytes
+                - getTotalReadBytes(old)) / sec;
+        long recordsSpeed = (totalReadRecords
+                - getTotalReadRecords(old)) / sec;
+
+        now.setLongCounter(BYTE_SPEED, bytesSpeed < 0 ? 0 : bytesSpeed);
+        now.setLongCounter(RECORD_SPEED, recordsSpeed < 0 ? 0 : recordsSpeed);
+        now.setDoubleCounter(PERCENTAGE, now.getLongCounter(STAGE) / (double) totalStage);
+
+        if (old.getThrowable() != null) {
+            now.setThrowable(old.getThrowable());
+        }
+
+        return now;
+    }
+
+    public static long getTotalReadRecords(final Communication communication) {
+        return communication.getLongCounter(READ_SUCCEED_RECORDS) +
+                communication.getLongCounter(READ_FAILED_RECORDS);
+    }
+
+    public static long getTotalReadBytes(final Communication communication) {
+        return communication.getLongCounter(READ_SUCCEED_BYTES) +
+                communication.getLongCounter(READ_FAILED_BYTES);
+    }
+
+    public static long getTotalErrorRecords(final Communication communication) {
+        return communication.getLongCounter(READ_FAILED_RECORDS) +
+                communication.getLongCounter(WRITE_FAILED_RECORDS);
+    }
+
+    public static long getTotalErrorBytes(final Communication communication) {
+        return communication.getLongCounter(READ_FAILED_BYTES) +
+                communication.getLongCounter(WRITE_FAILED_BYTES);
+    }
+
+    public static long getWriteSucceedRecords(final Communication communication) {
+        return communication.getLongCounter(WRITE_RECEIVED_RECORDS) -
+                communication.getLongCounter(WRITE_FAILED_RECORDS);
+    }
+
+    public static long getWriteSucceedBytes(final Communication communication) {
+        return communication.getLongCounter(WRITE_RECEIVED_BYTES) -
+                communication.getLongCounter(WRITE_FAILED_BYTES);
+    }
+
+    public static class Stringify {
+        private final static DecimalFormat df = new DecimalFormat("0.00");
+
+        public static String getSnapshot(final Communication communication) {
+            StringBuilder sb = new StringBuilder();
+            sb.append("Total ");
+            sb.append(getTotal(communication));
+            sb.append(" | ");
+            sb.append("Speed ");
+            sb.append(getSpeed(communication));
+            sb.append(" | ");
+            sb.append("Error ");
+            sb.append(getError(communication));
+            sb.append(" | ");
+            sb.append(" All Task WaitWriterTime ");
+            sb.append(PerfTrace.unitTime(communication.getLongCounter(WAIT_WRITER_TIME)));
+            sb.append(" | ");
+            sb.append(" All Task WaitReaderTime ");
+            sb.append(PerfTrace.unitTime(communication.getLongCounter(WAIT_READER_TIME)));
+            sb.append(" | ");
+            if (communication.getLongCounter(CommunicationTool.TRANSFORMER_USED_TIME) > 0
+                    || communication.getLongCounter(CommunicationTool.TRANSFORMER_SUCCEED_RECORDS) > 0
+                    || communication.getLongCounter(CommunicationTool.TRANSFORMER_FAILED_RECORDS) > 0
+                    || communication.getLongCounter(CommunicationTool.TRANSFORMER_FILTER_RECORDS) > 0) {
+                sb.append("Transformer Success ");
+                sb.append(String.format("%d records", communication.getLongCounter(CommunicationTool.TRANSFORMER_SUCCEED_RECORDS)));
+                sb.append(" | ");
+                sb.append("Transformer Error ");
+                sb.append(String.format("%d records", communication.getLongCounter(CommunicationTool.TRANSFORMER_FAILED_RECORDS)));
+                sb.append(" | ");
+                sb.append("Transformer Filter ");
+                sb.append(String.format("%d records", communication.getLongCounter(CommunicationTool.TRANSFORMER_FILTER_RECORDS)));
+                sb.append(" | ");
+                sb.append("Transformer usedTime ");
+                sb.append(PerfTrace.unitTime(communication.getLongCounter(CommunicationTool.TRANSFORMER_USED_TIME)));
+                sb.append(" | ");
+            }
+            sb.append("Percentage ");
+            sb.append(getPercentage(communication));
+            return sb.toString();
+        }
+
+        private static String getTotal(final Communication communication) {
+            return String.format("%d records, %d bytes",
+                    communication.getLongCounter(TOTAL_READ_RECORDS),
+                    communication.getLongCounter(TOTAL_READ_BYTES));
+        }
+
+        private static String getSpeed(final Communication communication) {
+            return String.format("%s/s, %d records/s",
+                    StrUtil.stringify(communication.getLongCounter(BYTE_SPEED)),
+                    communication.getLongCounter(RECORD_SPEED));
+        }
+
+        private static String getError(final Communication communication) {
+            return String.format("%d records, %d bytes",
+                    communication.getLongCounter(TOTAL_ERROR_RECORDS),
+                    communication.getLongCounter(TOTAL_ERROR_BYTES));
+        }
+
+        private static String getPercentage(final Communication communication) {
+            return df.format(communication.getDoubleCounter(PERCENTAGE) * 100) + "%";
+        }
+    }
+
+    public static class Jsonify {
+        @SuppressWarnings("rawtypes")
+        public static String getSnapshot(Communication communication) {
+            Validate.notNull(communication);
+
+            Map<String, Object> state = new HashMap<String, Object>();
+
+            Pair pair = getTotalBytes(communication);
+            state.put((String) pair.getKey(), pair.getValue());
+
+            pair = getTotalRecords(communication);
+            state.put((String) pair.getKey(), pair.getValue());
+
+            pair = getSpeedRecord(communication);
+            state.put((String) pair.getKey(), pair.getValue());
+
+            pair = getSpeedByte(communication);
+            state.put((String) pair.getKey(), pair.getValue());
+
+            pair = getStage(communication);
+            state.put((String) pair.getKey(), pair.getValue());
+
+            pair = getErrorRecords(communication);
+            state.put((String) pair.getKey(), pair.getValue());
+
+            pair = getErrorBytes(communication);
+            state.put((String) pair.getKey(), pair.getValue());
+
+            pair = getErrorMessage(communication);
+            state.put((String) pair.getKey(), pair.getValue());
+
+            pair = getPercentage(communication);
+            state.put((String) pair.getKey(), pair.getValue());
+
+            pair = getWaitReaderTime(communication);
+            state.put((String) pair.getKey(), pair.getValue());
+
+            pair = getWaitWriterTime(communication);
+            state.put((String) pair.getKey(), pair.getValue());
+
+            return JsonUtils.toJsonString(state);
+        }
+
+        private static Pair<String, Long> getTotalBytes(final Communication communication) {
+            return new Pair<String, Long>("totalBytes", communication.getLongCounter(TOTAL_READ_BYTES));
+        }
+
+        private static Pair<String, Long> getTotalRecords(final Communication communication) {
+            return new Pair<String, Long>("totalRecords", communication.getLongCounter(TOTAL_READ_RECORDS));
+        }
+
+        private static Pair<String, Long> getSpeedByte(final Communication communication) {
+            return new Pair<String, Long>("speedBytes", communication.getLongCounter(BYTE_SPEED));
+        }
+
+        private static Pair<String, Long> getSpeedRecord(final Communication communication) {
+            return new Pair<String, Long>("speedRecords", communication.getLongCounter(RECORD_SPEED));
+        }
+
+        private static Pair<String, Long> getErrorRecords(final Communication communication) {
+            return new Pair<String, Long>("errorRecords", communication.getLongCounter(TOTAL_ERROR_RECORDS));
+        }
+
+        private static Pair<String, Long> getErrorBytes(final Communication communication) {
+            return new Pair<String, Long>("errorBytes", communication.getLongCounter(TOTAL_ERROR_BYTES));
+        }
+
+        private static Pair<String, Long> getStage(final Communication communication) {
+            return new Pair<String, Long>("stage", communication.getLongCounter(STAGE));
+        }
+
+        private static Pair<String, Double> getPercentage(final Communication communication) {
+            return new Pair<String, Double>("percentage", communication.getDoubleCounter(PERCENTAGE));
+        }
+
+        private static Pair<String, String> getErrorMessage(final Communication communication) {
+            return new Pair<String, String>("errorMessage", communication.getThrowableMessage());
+        }
+
+        private static Pair<String, Long> getWaitReaderTime(final Communication communication) {
+            return new Pair<String, Long>("waitReaderTime", communication.getLongCounter(CommunicationTool.WAIT_READER_TIME));
+        }
+
+        private static Pair<String, Long> getWaitWriterTime(final Communication communication) {
+            return new Pair<String, Long>("waitWriterTime", communication.getLongCounter(CommunicationTool.WAIT_WRITER_TIME));
+        }
+
+        static class Pair<K, V> {
+            public Pair(final K key, final V value) {
+                this.key = key;
+                this.value = value;
+            }
+
+            public K getKey() {
+                return key;
+            }
+
+            public V getValue() {
+                return value;
+            }
+
+            private K key;
+
+            private V value;
+        }
+    }
+}
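
A sketch of how getReportCommunication turns two snapshots into a report: the read-byte and read-record deltas are divided by the seconds elapsed between the old and the new snapshot, and the totals are recomputed from the succeed/failed counters. The numbers are invented for the illustration:

import cn.tr.plugin.dataX.core.statistics.communication.Communication;
import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;

public class ReportSpeedDemo {
    public static void main(String[] args) {
        Communication old = new Communication();

        Communication now = new Communication();
        now.setLongCounter(CommunicationTool.READ_SUCCEED_RECORDS, 10_000);
        now.setLongCounter(CommunicationTool.READ_SUCCEED_BYTES, 1_000_000);
        now.setLongCounter(CommunicationTool.STAGE, 5);
        now.setTimestamp(old.getTimestamp() + 10_000); // pretend 10 seconds passed between snapshots

        Communication report = CommunicationTool.getReportCommunication(now, old, 10);
        System.out.println(report.getLongCounter(CommunicationTool.RECORD_SPEED)); // 1000 records/s
        System.out.println(report.getLongCounter(CommunicationTool.BYTE_SPEED));   // 100000 bytes/s
        System.out.println(CommunicationTool.Stringify.getSnapshot(report));
    }
}
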

+ 62 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/communication/LocalTGCommunicationManager.java

@@ -0,0 +1,62 @@
+package cn.tr.plugin.dataX.core.statistics.communication;
+
+import cn.tr.plugin.dataX.core.job.meta.State;
+import org.apache.commons.lang3.Validate;
+
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+public final class LocalTGCommunicationManager {
+    private static Map<Integer, Communication> taskGroupCommunicationMap =
+            new ConcurrentHashMap<Integer, Communication>();
+
+    public static void registerTaskGroupCommunication(
+            int taskGroupId, Communication communication) {
+        taskGroupCommunicationMap.put(taskGroupId, communication);
+    }
+
+    public static Communication getJobCommunication() {
+        Communication communication = new Communication();
+        communication.setState(State.SUCCEEDED);
+
+        for (Communication taskGroupCommunication :
+                taskGroupCommunicationMap.values()) {
+            communication.mergeFrom(taskGroupCommunication);
+        }
+
+        return communication;
+    }
+
+    /**
+     * Callers first fetch the set of taskGroupIds and then look up the corresponding communication one by one,
+     * which avoids modifying the map while iterating it and also avoids touching its key-value pairs.
+     *
+     * @return the set of registered taskGroupIds
+     */
+    public static Set<Integer> getTaskGroupIdSet() {
+        return taskGroupCommunicationMap.keySet();
+    }
+
+    public static Communication getTaskGroupCommunication(int taskGroupId) {
+        Validate.isTrue(taskGroupId >= 0, "taskGroupId不能小于0");
+
+        return taskGroupCommunicationMap.get(taskGroupId);
+    }
+
+    public static void updateTaskGroupCommunication(final int taskGroupId,
+                                                    final Communication communication) {
+        Validate.isTrue(taskGroupCommunicationMap.containsKey(
+                taskGroupId), String.format("taskGroupCommunicationMap中没有注册taskGroupId[%d]的Communication," +
+                "无法更新该taskGroup的信息", taskGroupId));
+        taskGroupCommunicationMap.put(taskGroupId, communication);
+    }
+
+    public static void clear() {
+        taskGroupCommunicationMap.clear();
+    }
+
+    public static Map<Integer, Communication> getTaskGroupCommunicationMap() {
+        return taskGroupCommunicationMap;
+    }
+}
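
A sketch of the register/update/roll-up cycle this holder manages: task groups are registered up front, a reporter later overwrites their communications, and the job-level view is simply the merge of the latest values. clear() is called at the end because the map is static:

import cn.tr.plugin.dataX.core.statistics.communication.Communication;
import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;
import cn.tr.plugin.dataX.core.statistics.communication.LocalTGCommunicationManager;

public class TGCommunicationDemo {
    public static void main(String[] args) {
        LocalTGCommunicationManager.registerTaskGroupCommunication(0, new Communication());
        LocalTGCommunicationManager.registerTaskGroupCommunication(1, new Communication());

        // a task group reports progress (normally done through ProcessInnerReporter)
        Communication progress = new Communication();
        progress.setLongCounter(CommunicationTool.READ_SUCCEED_RECORDS, 42);
        LocalTGCommunicationManager.updateTaskGroupCommunication(0, progress);

        Communication job = LocalTGCommunicationManager.getJobCommunication();
        System.out.println(job.getLongCounter(CommunicationTool.READ_SUCCEED_RECORDS)); // 42
        System.out.println(job.getState()); // RUNNING: task group 1 has not finished yet

        LocalTGCommunicationManager.clear();
    }
}
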

+ 68 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/collector/AbstractCollector.java

@@ -0,0 +1,68 @@
+package cn.tr.plugin.dataX.core.statistics.container.collector;
+
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.job.meta.State;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.communication.LocalTGCommunicationManager;
+import cn.tr.plugin.dataX.core.util.container.CoreConstant;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+public abstract class AbstractCollector {
+    private Map<Integer, Communication> taskCommunicationMap = new ConcurrentHashMap<Integer, Communication>();
+    private Long jobId;
+
+    public Map<Integer, Communication> getTaskCommunicationMap() {
+        return taskCommunicationMap;
+    }
+
+    public Long getJobId() {
+        return jobId;
+    }
+
+    public void setJobId(Long jobId) {
+        this.jobId = jobId;
+    }
+
+    public void registerTGCommunication(List<Configuration> taskGroupConfigurationList) {
+        for (Configuration config : taskGroupConfigurationList) {
+            int taskGroupId = config.getInt(
+                    CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID);
+            LocalTGCommunicationManager.registerTaskGroupCommunication(taskGroupId, new Communication());
+        }
+    }
+
+    public void registerTaskCommunication(List<Configuration> taskConfigurationList) {
+        for (Configuration taskConfig : taskConfigurationList) {
+            int taskId = taskConfig.getInt(CoreConstant.TASK_ID);
+            this.taskCommunicationMap.put(taskId, new Communication());
+        }
+    }
+
+    public Communication collectFromTask() {
+        Communication communication = new Communication();
+        communication.setState(State.SUCCEEDED);
+
+        for (Communication taskCommunication :
+                this.taskCommunicationMap.values()) {
+            communication.mergeFrom(taskCommunication);
+        }
+
+        return communication;
+    }
+
+    public abstract Communication collectFromTaskGroup();
+
+    public Map<Integer, Communication> getTGCommunicationMap() {
+        return LocalTGCommunicationManager.getTaskGroupCommunicationMap();
+    }
+
+    public Communication getTGCommunication(Integer taskGroupId) {
+        return LocalTGCommunicationManager.getTaskGroupCommunication(taskGroupId);
+    }
+
+    public Communication getTaskCommunication(Integer taskId) {
+        return this.taskCommunicationMap.get(taskId);
+    }
+}

+ 17 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/collector/ProcessInnerCollector.java

@@ -0,0 +1,17 @@
+package cn.tr.plugin.dataX.core.statistics.container.collector;
+
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.communication.LocalTGCommunicationManager;
+
+public class ProcessInnerCollector extends AbstractCollector {
+
+    public ProcessInnerCollector(Long jobId) {
+        super.setJobId(jobId);
+    }
+
+    @Override
+    public Communication collectFromTaskGroup() {
+        return LocalTGCommunicationManager.getJobCommunication();
+    }
+
+}

+ 88 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/communicator/AbstractContainerCommunicator.java

@@ -0,0 +1,88 @@
+package cn.tr.plugin.dataX.core.statistics.container.communicator;
+
+
+import cn.tr.plugin.dataX.common.statistics.VMInfo;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.job.meta.State;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.container.collector.AbstractCollector;
+import cn.tr.plugin.dataX.core.statistics.container.report.AbstractReporter;
+import cn.tr.plugin.dataX.core.util.container.CoreConstant;
+
+import java.util.List;
+import java.util.Map;
+
+public abstract class AbstractContainerCommunicator {
+    private Configuration configuration;
+    private AbstractCollector collector;
+    private AbstractReporter reporter;
+
+    private Long jobId;
+
+    private VMInfo vmInfo = VMInfo.getVmInfo();
+    private long lastReportTime = System.currentTimeMillis();
+
+
+    public Configuration getConfiguration() {
+        return this.configuration;
+    }
+
+    public AbstractCollector getCollector() {
+        return collector;
+    }
+
+    public AbstractReporter getReporter() {
+        return reporter;
+    }
+
+    public void setCollector(AbstractCollector collector) {
+        this.collector = collector;
+    }
+
+    public void setReporter(AbstractReporter reporter) {
+        this.reporter = reporter;
+    }
+
+    public Long getJobId() {
+        return jobId;
+    }
+
+    public AbstractContainerCommunicator(Configuration configuration) {
+        this.configuration = configuration;
+        this.jobId = configuration.getLong(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID);
+    }
+
+
+    public abstract void registerCommunication(List<Configuration> configurationList);
+
+    public abstract Communication collect();
+
+    public abstract void report(Communication communication);
+
+    public abstract State collectState();
+
+    public abstract Communication getCommunication(Integer id);
+
+    /**
+     * When the implementation is a TGContainerCommunicator, the returned Map is keyed by taskId.
+     * When the implementation is a JobContainerCommunicator, the returned Map is keyed by taskGroupId.
+     */
+    public abstract Map<Integer, Communication> getCommunicationMap();
+
+    public void resetCommunication(Integer id){
+        Map<Integer, Communication> map = getCommunicationMap();
+        map.put(id, new Communication());
+    }
+
+    public void reportVmInfo(){
+        long now = System.currentTimeMillis();
+        // print at most once every 5 minutes
+        if (now - lastReportTime >= 300000) {
+            // for now the VM info is only printed, not reported anywhere
+            if (vmInfo != null) {
+                vmInfo.getDelta(true);
+            }
+            lastReportTime = now;
+        }
+    }
+}

+ 63 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/communicator/job/StandAloneJobContainerCommunicator.java

@@ -0,0 +1,63 @@
+package cn.tr.plugin.dataX.core.statistics.container.communicator.job;
+
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.job.meta.State;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;
+import cn.tr.plugin.dataX.core.statistics.container.collector.ProcessInnerCollector;
+import cn.tr.plugin.dataX.core.statistics.container.communicator.AbstractContainerCommunicator;
+import cn.tr.plugin.dataX.core.statistics.container.report.ProcessInnerReporter;
+import cn.tr.plugin.dataX.core.util.container.CoreConstant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.Map;
+
+public class StandAloneJobContainerCommunicator extends AbstractContainerCommunicator {
+    private static final Logger LOG = LoggerFactory
+            .getLogger(StandAloneJobContainerCommunicator.class);
+
+    public StandAloneJobContainerCommunicator(Configuration configuration) {
+        super(configuration);
+        super.setCollector(new ProcessInnerCollector(configuration.getLong(
+                CoreConstant.DATAX_CORE_CONTAINER_JOB_ID)));
+        super.setReporter(new ProcessInnerReporter());
+    }
+
+    @Override
+    public void registerCommunication(List<Configuration> configurationList) {
+        super.getCollector().registerTGCommunication(configurationList);
+    }
+
+    @Override
+    public Communication collect() {
+        return super.getCollector().collectFromTaskGroup();
+    }
+
+    @Override
+    public State collectState() {
+        return this.collect().getState();
+    }
+
+    /**
+     * 和 DistributeJobContainerCollector 的 report 实现一样
+     */
+    @Override
+    public void report(Communication communication) {
+        super.getReporter().reportJobCommunication(super.getJobId(), communication);
+
+        LOG.info(CommunicationTool.Stringify.getSnapshot(communication));
+        reportVmInfo();
+    }
+
+    @Override
+    public Communication getCommunication(Integer taskGroupId) {
+        return super.getCollector().getTGCommunication(taskGroupId);
+    }
+
+    @Override
+    public Map<Integer, Communication> getCommunicationMap() {
+        return super.getCollector().getTGCommunicationMap();
+    }
+}

+ 74 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/communicator/taskgroup/AbstractTGContainerCommunicator.java

@@ -0,0 +1,74 @@
+package cn.tr.plugin.dataX.core.statistics.container.communicator.taskgroup;
+
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.job.meta.State;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.container.collector.ProcessInnerCollector;
+import cn.tr.plugin.dataX.core.statistics.container.communicator.AbstractContainerCommunicator;
+import cn.tr.plugin.dataX.core.util.Validate;
+import cn.tr.plugin.dataX.core.util.container.CoreConstant;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Parent class that collects and reports the communication of a taskGroupContainer.
+ * Its taskCommunicationMap records the communication of each taskExecutor.
+ */
+public abstract class AbstractTGContainerCommunicator extends AbstractContainerCommunicator {
+
+    protected long jobId;
+
+    /**
+     * Because the taskGroupContainer is scheduled inside the process, its registerCommunication(),
+     * getCommunication(), getCommunications() and collect() behave the same way:
+     * every TG collector is a ProcessInnerCollector.
+     */
+    protected int taskGroupId;
+
+    public AbstractTGContainerCommunicator(Configuration configuration) {
+        super(configuration);
+        this.jobId = configuration.getInt(
+                CoreConstant.DATAX_CORE_CONTAINER_JOB_ID);
+        super.setCollector(new ProcessInnerCollector(this.jobId));
+        this.taskGroupId = configuration.getInt(
+                CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID);
+    }
+
+    @Override
+    public void registerCommunication(List<Configuration> configurationList) {
+        super.getCollector().registerTaskCommunication(configurationList);
+    }
+
+    @Override
+    public final Communication collect() {
+        return this.getCollector().collectFromTask();
+    }
+
+    @Override
+    public final State collectState() {
+        Communication communication = new Communication();
+        communication.setState(State.SUCCEEDED);
+
+        for (Communication taskCommunication :
+                super.getCollector().getTaskCommunicationMap().values()) {
+            communication.mergeStateFrom(taskCommunication);
+        }
+
+        return communication.getState();
+    }
+
+    @Override
+    public final Communication getCommunication(Integer taskId) {
+        Validate.isTrue(taskId >= 0, "注册的taskId不能小于0");
+
+        return super.getCollector().getTaskCommunication(taskId);
+    }
+
+    @Override
+    public final Map<Integer, Communication> getCommunicationMap() {
+        return super.getCollector().getTaskCommunicationMap();
+    }
+
+}

+ 19 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/communicator/taskgroup/StandaloneTGContainerCommunicator.java

@@ -0,0 +1,19 @@
+package cn.tr.plugin.dataX.core.statistics.container.communicator.taskgroup;
+
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.container.report.ProcessInnerReporter;
+
+public class StandaloneTGContainerCommunicator extends AbstractTGContainerCommunicator {
+
+    public StandaloneTGContainerCommunicator(Configuration configuration) {
+        super(configuration);
+        super.setReporter(new ProcessInnerReporter());
+    }
+
+    @Override
+    public void report(Communication communication) {
+        super.getReporter().reportTGCommunication(super.taskGroupId, communication);
+    }
+
+}

+ 11 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/report/AbstractReporter.java

@@ -0,0 +1,11 @@
+package cn.tr.plugin.dataX.core.statistics.container.report;
+
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+
+public abstract class AbstractReporter {
+
+    public abstract void reportJobCommunication(Long jobId, Communication communication);
+
+    public abstract void reportTGCommunication(Integer taskGroupId, Communication communication);
+
+}

+ 18 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/container/report/ProcessInnerReporter.java

@@ -0,0 +1,18 @@
+package cn.tr.plugin.dataX.core.statistics.container.report;
+
+
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.communication.LocalTGCommunicationManager;
+
+public class ProcessInnerReporter extends AbstractReporter {
+
+    @Override
+    public void reportJobCommunication(Long jobId, Communication communication) {
+        // do nothing
+    }
+
+    @Override
+    public void reportTGCommunication(Integer taskGroupId, Communication communication) {
+        LocalTGCommunicationManager.updateTaskGroupCommunication(taskGroupId, communication);
+    }
+}

+ 31 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/plugin/DefaultJobPluginCollector.java

@@ -0,0 +1,31 @@
+package cn.tr.plugin.dataX.core.statistics.plugin;
+
+import cn.tr.plugin.dataX.common.plugin.JobPluginCollector;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.container.communicator.AbstractContainerCommunicator;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Created by jingxing on 14-9-9.
+ */
+public final class DefaultJobPluginCollector implements JobPluginCollector {
+    private AbstractContainerCommunicator jobCollector;
+
+    public DefaultJobPluginCollector(AbstractContainerCommunicator containerCollector) {
+        this.jobCollector = containerCollector;
+    }
+
+    @Override
+    public Map<String, List<String>> getMessage() {
+        Communication totalCommunication = this.jobCollector.collect();
+        return totalCommunication.getMessage();
+    }
+
+    @Override
+    public List<String> getMessage(String key) {
+        Communication totalCommunication = this.jobCollector.collect();
+        return totalCommunication.getMessage(key);
+    }
+}
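
The message flow this enables, sketched end to end: a task-side collector attaches a free-form message, the task communications are merged upward, and the Job side reads the merged list back through JobPluginCollector. The key name is only an example:

import cn.tr.plugin.dataX.common.plugin.JobPluginCollector;
import cn.tr.plugin.dataX.common.plugin.TaskPluginCollector;

import java.util.List;

public class MessageFlowSketch {
    // task side: any Task can attach a free-form message to its communication
    static void reportConflict(TaskPluginCollector taskCollector, String table) {
        taskCollector.collectMessage("conflictTables", table);
    }

    // job side: after the run, the Job reads back the merged messages of all its tasks
    static List<String> conflicts(JobPluginCollector jobCollector) {
        return jobCollector.getMessage("conflictTables");
    }
}
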

+ 76 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/plugin/task/AbstractTaskPluginCollector.java

@@ -0,0 +1,76 @@
+package cn.tr.plugin.dataX.core.statistics.plugin.task;
+
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.constant.PluginType;
+import cn.tr.plugin.dataX.common.element.Record;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.tr.plugin.dataX.common.plugin.TaskPluginCollector;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Created by jingxing on 14-9-11.
+ */
+public abstract class AbstractTaskPluginCollector extends TaskPluginCollector {
+    private static final Logger LOG = LoggerFactory
+            .getLogger(AbstractTaskPluginCollector.class);
+
+    private Communication communication;
+
+    private Configuration configuration;
+
+    private PluginType pluginType;
+
+    public AbstractTaskPluginCollector(Configuration conf, Communication communication,
+                                       PluginType type) {
+        this.configuration = conf;
+        this.communication = communication;
+        this.pluginType = type;
+    }
+
+    public Communication getCommunication() {
+        return communication;
+    }
+
+    public Configuration getConfiguration() {
+        return configuration;
+    }
+
+    public PluginType getPluginType() {
+        return pluginType;
+    }
+
+    @Override
+    final public void collectMessage(String key, String value) {
+        this.communication.addMessage(key, value);
+    }
+
+    @Override
+    public void collectDirtyRecord(Record dirtyRecord, Throwable t,
+                                   String errorMessage) {
+
+        if (null == dirtyRecord) {
+            LOG.warn("脏数据record=null.");
+            return;
+        }
+
+        if (this.pluginType.equals(PluginType.READER)) {
+            this.communication.increaseCounter(
+                    CommunicationTool.READ_FAILED_RECORDS, 1);
+            this.communication.increaseCounter(
+                    CommunicationTool.READ_FAILED_BYTES, dirtyRecord.getByteSize());
+        } else if (this.pluginType.equals(PluginType.WRITER)) {
+            this.communication.increaseCounter(
+                    CommunicationTool.WRITE_FAILED_RECORDS, 1);
+            this.communication.increaseCounter(
+                    CommunicationTool.WRITE_FAILED_BYTES, dirtyRecord.getByteSize());
+        } else {
+            throw DataXException.asDataXException(
+                    TRExcCode.RUNTIME_ERROR,
+                    String.format("Unknown plugin type [%s].", this.pluginType));
+        }
+    }
+}
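
AbstractTaskPluginCollector only does the bookkeeping: a dirty record bumps the READ_FAILED_* or WRITE_FAILED_* counters depending on the plugin type, and anything beyond counting is left to subclasses. Here is a hedged sketch of how a writer task might hand a bad row to whatever TaskPluginCollector the framework injected; the failing write and the error message are illustrative assumptions, not code from this commit.

    import cn.tr.plugin.dataX.common.element.Record;
    import cn.tr.plugin.dataX.common.plugin.TaskPluginCollector;

    public class DirtyRowExample {
        // "collector" is the TaskPluginCollector the framework set on the task plugin.
        public static void writeOne(Record record, TaskPluginCollector collector) {
            try {
                // ... attempt to write the record to the target store (omitted) ...
                throw new IllegalStateException("demo write failure");   // stand-in for a real error
            } catch (Exception e) {
                // counted as WRITE_FAILED_RECORDS / WRITE_FAILED_BYTES by AbstractTaskPluginCollector
                collector.collectDirtyRecord(record, e, "write failed: " + e.getMessage());
            }
        }
    }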

+ 24 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/plugin/task/HttpPluginCollector.java

@@ -0,0 +1,24 @@
+package cn.tr.plugin.dataX.core.statistics.plugin.task;
+
+
+import cn.tr.plugin.dataX.common.constant.PluginType;
+import cn.tr.plugin.dataX.common.element.Record;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+
+/**
+ * Created by jingxing on 14-9-9.
+ */
+public class HttpPluginCollector extends AbstractTaskPluginCollector {
+    public HttpPluginCollector(Configuration configuration, Communication communication,
+                               PluginType type) {
+        super(configuration, communication, type);
+    }
+
+    @Override
+    public void collectDirtyRecord(Record dirtyRecord, Throwable t,
+                                   String errorMessage) {
+        super.collectDirtyRecord(dirtyRecord, t, errorMessage);
+    }
+
+}

+ 73 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/plugin/task/StdoutPluginCollector.java

@@ -0,0 +1,73 @@
+package cn.tr.plugin.dataX.core.statistics.plugin.task;
+
+import cn.tr.core.utils.JsonUtils;
+import cn.tr.plugin.dataX.common.constant.PluginType;
+import cn.tr.plugin.dataX.common.element.Record;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.plugin.task.util.DirtyRecord;
+import cn.tr.plugin.dataX.core.util.container.CoreConstant;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Created by jingxing on 14-9-9.
+ */
+public class StdoutPluginCollector extends AbstractTaskPluginCollector {
+    private static final Logger LOG = LoggerFactory
+            .getLogger(StdoutPluginCollector.class);
+
+    private static final int DEFAULT_MAX_DIRTYNUM = 128;
+
+    private AtomicInteger maxLogNum = new AtomicInteger(0);
+
+    private AtomicInteger currentLogNum = new AtomicInteger(0);
+
+    public StdoutPluginCollector(Configuration configuration, Communication communication,
+                                 PluginType type) {
+        super(configuration, communication, type);
+        maxLogNum = new AtomicInteger(
+                configuration.getInt(
+                        CoreConstant.DATAX_CORE_STATISTICS_COLLECTOR_PLUGIN_MAXDIRTYNUM,
+                        DEFAULT_MAX_DIRTYNUM));
+    }
+
+    private String formatDirty(final Record dirty, final Throwable t,
+                               final String msg) {
+        Map<String, Object> msgGroup = new HashMap<String, Object>();
+
+        msgGroup.put("type", super.getPluginType().toString());
+        if (StringUtils.isNotBlank(msg)) {
+            msgGroup.put("message", msg);
+        }
+        if (null != t && StringUtils.isNotBlank(t.getMessage())) {
+            msgGroup.put("exception", t.getMessage());
+        }
+        if (null != dirty) {
+            msgGroup.put("record", DirtyRecord.asDirtyRecord(dirty)
+                    .getColumns());
+        }
+
+        return JsonUtils.toJsonString(msgGroup);
+    }
+
+    @Override
+    public void collectDirtyRecord(Record dirtyRecord, Throwable t,
+                                   String errorMessage) {
+        int logNum = currentLogNum.getAndIncrement();
+        if (logNum == 0 && t != null) {
+            LOG.error("", t);
+        }
+        if (maxLogNum.intValue() < 0 || currentLogNum.intValue() < maxLogNum.intValue()) {
+            LOG.error("Dirty record: \n"
+                    + this.formatDirty(dirtyRecord, t, errorMessage));
+        }
+
+        super.collectDirtyRecord(dirtyRecord, t, errorMessage);
+    }
+}
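
StdoutPluginCollector caps how many dirty rows reach the log: the limit comes from the configuration key behind CoreConstant.DATAX_CORE_STATISTICS_COLLECTOR_PLUGIN_MAXDIRTYNUM (128 by default here), a negative value disables the cap, and only the first failure gets a full stack trace. A small standalone sketch of the same capping pattern, independent of the framework classes:

    import java.util.concurrent.atomic.AtomicInteger;

    public class CappedDirtyLogger {
        private final int maxLogNum;                       // negative means "no cap"
        private final AtomicInteger current = new AtomicInteger(0);

        public CappedDirtyLogger(int maxLogNum) {
            this.maxLogNum = maxLogNum;
        }

        public void onDirty(String formatted, Throwable t) {
            int n = current.getAndIncrement();
            if (n == 0 && t != null) {
                t.printStackTrace();                       // full stack trace only for the first dirty row
            }
            if (maxLogNum < 0 || current.intValue() < maxLogNum) {
                System.err.println("dirty record: " + formatted);
            }
        }

        public static void main(String[] args) {
            CappedDirtyLogger logger = new CappedDirtyLogger(3);
            for (int i = 0; i < 10; i++) {
                logger.onDirty("row-" + i, i == 0 ? new RuntimeException("demo") : null);
            }
            // only the first couple of rows are printed; the rest are merely counted
        }
    }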

+ 171 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/statistics/plugin/task/util/DirtyRecord.java

@@ -0,0 +1,171 @@
+package cn.tr.plugin.dataX.core.statistics.plugin.task.util;
+
+import cn.tr.core.utils.JsonUtils;
+import cn.tr.plugin.dataX.common.element.Column;
+import cn.tr.plugin.dataX.common.element.Record;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.tr.plugin.dataX.core.util.FrameworkErrorCode;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+
+public class DirtyRecord implements Record {
+	private List<Column> columns = new ArrayList<Column>();
+	private Map<String, String> meta;
+
+	public static DirtyRecord asDirtyRecord(final Record record) {
+		DirtyRecord result = new DirtyRecord();
+		for (int i = 0; i < record.getColumnNumber(); i++) {
+			result.addColumn(record.getColumn(i));
+		}
+		result.setMeta(record.getMeta());
+
+		return result;
+	}
+
+	@Override
+	public void addColumn(Column column) {
+		this.columns.add(
+                DirtyColumn.asDirtyColumn(column, this.columns.size()));
+	}
+
+	@Override
+	public String toString() {
+		return JsonUtils.toJsonString(this.columns);
+	}
+
+	@Override
+	public void setColumn(int i, Column column) {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	@Override
+	public Column getColumn(int i) {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	@Override
+	public int getColumnNumber() {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	@Override
+	public int getByteSize() {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	@Override
+	public int getMemorySize() {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	@Override
+	public void setMeta(Map<String, String> meta) {
+		this.meta = meta;
+	}
+
+	@Override
+	public Map<String, String> getMeta() {
+		return this.meta;
+	}
+
+	public List<Column> getColumns() {
+		return columns;
+	}
+
+	public void setColumns(List<Column> columns) {
+		this.columns = columns;
+	}
+
+}
+
+class DirtyColumn extends Column {
+	private int index;
+
+	public static Column asDirtyColumn(final Column column, int index) {
+		return new DirtyColumn(column, index);
+	}
+
+	private DirtyColumn(Column column, int index) {
+		this(null == column ? null : column.getRawData(),
+				null == column ? Column.Type.NULL : column.getType(),
+				null == column ? 0 : column.getByteSize(), index);
+	}
+
+	public int getIndex() {
+		return index;
+	}
+
+	public void setIndex(int index) {
+		this.index = index;
+	}
+
+	@Override
+	public Long asLong() {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	@Override
+	public Double asDouble() {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	@Override
+	public String asString() {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	@Override
+	public Date asDate() {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+	
+	@Override
+	public Date asDate(String dateFormat) {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	@Override
+	public byte[] asBytes() {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	@Override
+	public Boolean asBoolean() {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	@Override
+	public BigDecimal asBigDecimal() {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	@Override
+	public BigInteger asBigInteger() {
+		throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+				"该方法不支持!");
+	}
+
+	private DirtyColumn(Object object, Type type, int byteSize, int index) {
+		super(object, type, byteSize);
+		this.setIndex(index);
+	}
+}

+ 567 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/taskgroup/TaskGroupContainer.java

@@ -0,0 +1,567 @@
+package cn.tr.plugin.dataX.core.taskgroup;
+
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.core.utils.JsonUtils;
+import cn.tr.plugin.dataX.common.constant.PluginType;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.tr.plugin.dataX.common.plugin.RecordSender;
+import cn.tr.plugin.dataX.common.plugin.TaskPluginCollector;
+import cn.tr.plugin.dataX.common.statistics.PerfRecord;
+import cn.tr.plugin.dataX.common.statistics.PerfTrace;
+import cn.tr.plugin.dataX.common.statistics.VMInfo;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.AbstractContainer;
+import cn.tr.plugin.dataX.core.job.meta.State;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;
+import cn.tr.plugin.dataX.core.statistics.container.communicator.taskgroup.StandaloneTGContainerCommunicator;
+import cn.tr.plugin.dataX.core.statistics.plugin.task.AbstractTaskPluginCollector;
+import cn.tr.plugin.dataX.core.taskgroup.runner.AbstractRunner;
+import cn.tr.plugin.dataX.core.taskgroup.runner.ReaderRunner;
+import cn.tr.plugin.dataX.core.taskgroup.runner.WriterRunner;
+import cn.tr.plugin.dataX.core.transport.channel.Channel;
+import cn.tr.plugin.dataX.core.transport.exchanger.BufferedRecordExchanger;
+import cn.tr.plugin.dataX.core.transport.exchanger.BufferedRecordTransformerExchanger;
+import cn.tr.plugin.dataX.core.transport.transformer.TransformerExecution;
+import cn.tr.plugin.dataX.core.util.ClassUtil;
+import cn.tr.plugin.dataX.core.util.FrameworkErrorCode;
+import cn.tr.plugin.dataX.core.util.TransformerUtil;
+import cn.tr.plugin.dataX.core.util.Validate;
+import cn.tr.plugin.dataX.core.util.container.CoreConstant;
+import cn.tr.plugin.dataX.core.util.container.LoadUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+
+public class TaskGroupContainer extends AbstractContainer {
+    private static final Logger LOG = LoggerFactory
+            .getLogger(TaskGroupContainer.class);
+
+    /**
+     * jobId this taskGroup belongs to
+     */
+    private long jobId;
+
+    /**
+     * id of this taskGroup
+     */
+    private int taskGroupId;
+
+    /**
+     * channel implementation class to use
+     */
+    private String channelClazz;
+
+    /**
+     * task plugin collector implementation class to use
+     */
+    private String taskCollectorClass;
+
+    private TaskMonitor taskMonitor = TaskMonitor.getInstance();
+
+    public TaskGroupContainer(Configuration configuration) {
+        super(configuration);
+
+        initCommunicator(configuration);
+
+        this.jobId = this.configuration.getLong(
+                CoreConstant.DATAX_CORE_CONTAINER_JOB_ID);
+        this.taskGroupId = this.configuration.getInt(
+                CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID);
+
+        this.channelClazz = this.configuration.getString(
+                CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_CLASS);
+        this.taskCollectorClass = this.configuration.getString(
+                CoreConstant.DATAX_CORE_STATISTICS_COLLECTOR_PLUGIN_TASKCLASS);
+    }
+
+    private void initCommunicator(Configuration configuration) {
+        super.setContainerCommunicator(new StandaloneTGContainerCommunicator(configuration));
+
+    }
+
+    public long getJobId() {
+        return jobId;
+    }
+
+    public int getTaskGroupId() {
+        return taskGroupId;
+    }
+
+    @Override
+    public void start() {
+        try {
+            /**
+             * Interval between state checks; kept short so tasks are dispatched to channels promptly.
+             */
+            int sleepIntervalInMillSec = this.configuration.getInt(
+                    CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_SLEEPINTERVAL, 100);
+            /**
+             * Interval between status reports; somewhat longer to avoid flooding the reporter.
+             */
+            long reportIntervalInMillSec = this.configuration.getLong(
+                    CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_REPORTINTERVAL,
+                    10000);
+
+            // number of channels for this task group
+            int channelNumber = this.configuration.getInt(
+                    CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL);
+
+            int taskMaxRetryTimes = this.configuration.getInt(
+                    CoreConstant.DATAX_CORE_CONTAINER_TASK_FAILOVER_MAXRETRYTIMES, 1);
+
+            long taskRetryIntervalInMsec = this.configuration.getLong(
+                    CoreConstant.DATAX_CORE_CONTAINER_TASK_FAILOVER_RETRYINTERVALINMSEC, 10000);
+
+            long taskMaxWaitInMsec = this.configuration.getLong(CoreConstant.DATAX_CORE_CONTAINER_TASK_FAILOVER_MAXWAITINMSEC, 60000);
+            
+            List<Configuration> taskConfigs = this.configuration
+                    .getListConfiguration(CoreConstant.DATAX_JOB_CONTENT);
+
+            if(LOG.isDebugEnabled()) {
+                LOG.debug("taskGroup[{}]'s task configs[{}]", this.taskGroupId,
+                        JsonUtils.toJsonString(taskConfigs));
+            }
+            
+            int taskCountInThisTaskGroup = taskConfigs.size();
+            LOG.info(String.format(
+                    "taskGroupId=[%d] start [%d] channels for [%d] tasks.",
+                    this.taskGroupId, channelNumber, taskCountInThisTaskGroup));
+            
+            this.containerCommunicator.registerCommunication(taskConfigs);
+
+            Map<Integer, Configuration> taskConfigMap = buildTaskConfigMap(taskConfigs); //taskId与task配置
+            List<Configuration> taskQueue = buildRemainTasks(taskConfigs); //待运行task列表
+            Map<Integer, TaskExecutor> taskFailedExecutorMap = new HashMap<Integer, TaskExecutor>(); //taskId与上次失败实例
+            List<TaskExecutor> runTasks = new ArrayList<TaskExecutor>(channelNumber); //正在运行task
+            Map<Integer, Long> taskStartTimeMap = new HashMap<Integer, Long>(); //任务开始时间
+
+            long lastReportTimeStamp = 0;
+            Communication lastTaskGroupContainerCommunication = new Communication();
+
+            while (true) {
+                // 1. Check the state of every task
+            	boolean failedOrKilled = false;
+            	Map<Integer, Communication> communicationMap = containerCommunicator.getCommunicationMap();
+            	for(Map.Entry<Integer, Communication> entry : communicationMap.entrySet()){
+            		Integer taskId = entry.getKey();
+            		Communication taskCommunication = entry.getValue();
+                    if(!taskCommunication.isFinished()){
+                        continue;
+                    }
+                    TaskExecutor taskExecutor = removeTask(runTasks, taskId);
+
+                    // removed from runTasks above, so remove it from the monitor as well
+                    taskMonitor.removeTask(taskId);
+
+                    // on failure, re-queue the task if it supports failover and has retries left
+            		if(taskCommunication.getState() == State.FAILED){
+                        taskFailedExecutorMap.put(taskId, taskExecutor);
+            			if(taskExecutor.supportFailOver() && taskExecutor.getAttemptCount() < taskMaxRetryTimes){
+                            taskExecutor.shutdown(); // shut down the old executor
+                            containerCommunicator.resetCommunication(taskId); // reset the task's state
+                            Configuration taskConfig = taskConfigMap.get(taskId);
+                            taskQueue.add(taskConfig); // put the task back into the queue
+            			}else{
+            				failedOrKilled = true;
+                			break;
+            			}
+            		}else if(taskCommunication.getState() == State.KILLED){
+            			failedOrKilled = true;
+            			break;
+            		}else if(taskCommunication.getState() == State.SUCCEEDED){
+                        Long taskStartTime = taskStartTimeMap.get(taskId);
+                        if(taskStartTime != null){
+                            Long usedTime = System.currentTimeMillis() - taskStartTime;
+                            LOG.info("taskGroup[{}] taskId[{}] succeeded, used[{}]ms",
+                                    this.taskGroupId, taskId, usedTime);
+                            // usedTime*1000*1000 converts ms to the ns unit PerfRecord uses; this is a simple
+                            // registration so the longest task can be reported, hence the dedicated static method
+                            PerfRecord.addPerfRecord(taskGroupId, taskId, PerfRecord.PHASE.TASK_TOTAL,taskStartTime, usedTime * 1000L * 1000L);
+                            taskStartTimeMap.remove(taskId);
+                            taskConfigMap.remove(taskId);
+                        }
+                    }
+            	}
+            	
+                // 2. If any taskExecutor in this taskGroup failed or was killed, report the error
+                if (failedOrKilled) {
+                    lastTaskGroupContainerCommunication = reportTaskGroupCommunication(
+                            lastTaskGroupContainerCommunication, taskCountInThisTaskGroup);
+
+                    throw DataXException.asDataXException(
+                            FrameworkErrorCode.PLUGIN_RUNTIME_ERROR, lastTaskGroupContainerCommunication.getThrowable());
+                }
+                
+                // 3. There are pending tasks and fewer running tasks than the channel limit
+                Iterator<Configuration> iterator = taskQueue.iterator();
+                while(iterator.hasNext() && runTasks.size() < channelNumber){
+                    Configuration taskConfig = iterator.next();
+                    Integer taskId = taskConfig.getInt(CoreConstant.TASK_ID);
+                    int attemptCount = 1;
+                    TaskExecutor lastExecutor = taskFailedExecutorMap.get(taskId);
+                    if(lastExecutor!=null){
+                        attemptCount = lastExecutor.getAttemptCount() + 1;
+                        long now = System.currentTimeMillis();
+                        long failedTime = lastExecutor.getTimeStamp();
+                        if (now - failedTime < taskRetryIntervalInMsec) { // retry interval not reached yet, keep it queued
+                            continue;
+                        }
+                        if (!lastExecutor.isShutdown()) { // the previously failed task has not finished yet
+                            if (now - failedTime > taskMaxWaitInMsec) {
+                                markCommunicationFailed(taskId);
+                                reportTaskGroupCommunication(lastTaskGroupContainerCommunication, taskCountInThisTaskGroup);
+                                throw DataXException.asDataXException(TRExcCode.WAIT_TIME_EXCEED, "task failover wait timed out");
+                            } else {
+                                lastExecutor.shutdown(); // try to shut it down again
+                                continue;
+                            }
+                        }else{
+                            LOG.info("taskGroup[{}] taskId[{}] attemptCount[{}] has already shutdown",
+                                    this.taskGroupId, taskId, lastExecutor.getAttemptCount());
+                        }
+                    }
+                    Configuration taskConfigForRun = taskMaxRetryTimes > 1 ? taskConfig.clone() : taskConfig;
+                	TaskExecutor taskExecutor = new TaskExecutor(taskConfigForRun, attemptCount);
+                    taskStartTimeMap.put(taskId, System.currentTimeMillis());
+                	taskExecutor.doStart();
+
+                    iterator.remove();
+                    runTasks.add(taskExecutor);
+
+                    // the task was just added to runTasks, so register it with the monitor
+                    taskMonitor.registerTask(taskId, this.containerCommunicator.getCommunication(taskId));
+
+                    taskFailedExecutorMap.remove(taskId);
+                    LOG.info("taskGroup[{}] taskId[{}] attemptCount[{}] is started",
+                            this.taskGroupId, taskId, attemptCount);
+                }
+
+                // 4. Task queue is empty, all executors have finished and the collected state is SUCCEEDED -> done
+                if (taskQueue.isEmpty() && isAllTaskDone(runTasks) && containerCommunicator.collectState() == State.SUCCEEDED) {
+                    // report once more even on success; otherwise the collected statistics can be
+                    // inaccurate when the tasks finish very quickly
+                    lastTaskGroupContainerCommunication = reportTaskGroupCommunication(
+                            lastTaskGroupContainerCommunication, taskCountInThisTaskGroup);
+
+                    LOG.info("taskGroup[{}] completed its tasks.", this.taskGroupId);
+                    break;
+                }
+
+                // 5. If the report interval has elapsed, report immediately
+                long now = System.currentTimeMillis();
+                if (now - lastReportTimeStamp > reportIntervalInMillSec) {
+                    lastTaskGroupContainerCommunication = reportTaskGroupCommunication(
+                            lastTaskGroupContainerCommunication, taskCountInThisTaskGroup);
+
+                    lastReportTimeStamp = now;
+
+                    // the taskMonitor checks every running task once per reportIntervalInMillSec
+                    for(TaskExecutor taskExecutor:runTasks){
+                        taskMonitor.report(taskExecutor.getTaskId(),this.containerCommunicator.getCommunication(taskExecutor.getTaskId()));
+                    }
+
+                }
+
+                Thread.sleep(sleepIntervalInMillSec);
+            }
+
+            // 6. Report one final time
+            reportTaskGroupCommunication(lastTaskGroupContainerCommunication, taskCountInThisTaskGroup);
+
+
+        } catch (Throwable e) {
+            Communication nowTaskGroupContainerCommunication = this.containerCommunicator.collect();
+
+            if (nowTaskGroupContainerCommunication.getThrowable() == null) {
+                nowTaskGroupContainerCommunication.setThrowable(e);
+            }
+            nowTaskGroupContainerCommunication.setState(State.FAILED);
+            this.containerCommunicator.report(nowTaskGroupContainerCommunication);
+
+            throw DataXException.asDataXException(
+                    TRExcCode.RUNTIME_ERROR, e);
+        }finally {
+            if(!PerfTrace.getInstance().isJob()){
+                // finally print average CPU usage and GC statistics
+                VMInfo vmInfo = VMInfo.getVmInfo();
+                if (vmInfo != null) {
+                    vmInfo.getDelta(false);
+                    LOG.info(vmInfo.totalString());
+                }
+
+                LOG.info(PerfTrace.getInstance().summarizeNoException());
+            }
+        }
+    }
+    
+    private Map<Integer, Configuration> buildTaskConfigMap(List<Configuration> configurations){
+    	Map<Integer, Configuration> map = new HashMap<Integer, Configuration>();
+    	for(Configuration taskConfig : configurations){
+        	int taskId = taskConfig.getInt(CoreConstant.TASK_ID);
+        	map.put(taskId, taskConfig);
+    	}
+    	return map;
+    }
+
+    private List<Configuration> buildRemainTasks(List<Configuration> configurations){
+    	List<Configuration> remainTasks = new LinkedList<Configuration>();
+    	for(Configuration taskConfig : configurations){
+    		remainTasks.add(taskConfig);
+    	}
+    	return remainTasks;
+    }
+    
+    private TaskExecutor removeTask(List<TaskExecutor> taskList, int taskId){
+    	Iterator<TaskExecutor> iterator = taskList.iterator();
+    	while(iterator.hasNext()){
+    		TaskExecutor taskExecutor = iterator.next();
+    		if(taskExecutor.getTaskId() == taskId){
+    			iterator.remove();
+    			return taskExecutor;
+    		}
+    	}
+    	return null;
+    }
+    
+    private boolean isAllTaskDone(List<TaskExecutor> taskList){
+    	for(TaskExecutor taskExecutor : taskList){
+    		if(!taskExecutor.isTaskFinished()){
+    			return false;
+    		}
+    	}
+    	return true;
+    }
+
+    private Communication reportTaskGroupCommunication(Communication lastTaskGroupContainerCommunication, int taskCount){
+        Communication nowTaskGroupContainerCommunication = this.containerCommunicator.collect();
+        nowTaskGroupContainerCommunication.setTimestamp(System.currentTimeMillis());
+        Communication reportCommunication = CommunicationTool.getReportCommunication(nowTaskGroupContainerCommunication,
+                lastTaskGroupContainerCommunication, taskCount);
+        this.containerCommunicator.report(reportCommunication);
+        return reportCommunication;
+    }
+
+    private void markCommunicationFailed(Integer taskId){
+        Communication communication = containerCommunicator.getCommunication(taskId);
+        communication.setState(State.FAILED);
+    }
+
+    /**
+     * TaskExecutor runs one complete task,
+     * pairing exactly one reader with one writer.
+     */
+    class TaskExecutor {
+        private Configuration taskConfig;
+
+        private int taskId;
+
+        private int attemptCount;
+
+        private Channel channel;
+
+        private Thread readerThread;
+
+        private Thread writerThread;
+        
+        private ReaderRunner readerRunner;
+        
+        private WriterRunner writerRunner;
+
+        /**
+         * This taskCommunication is shared in several places:
+         * 1. the channel
+         * 2. the readerRunner and writerRunner
+         * 3. the taskPluginCollector of the reader and the writer
+         */
+        private Communication taskCommunication;
+
+        public TaskExecutor(Configuration taskConf, int attemptCount) {
+            // keep this taskExecutor's configuration
+            this.taskConfig = taskConf;
+            Validate.isTrue(null != this.taskConfig.getConfiguration(CoreConstant.JOB_READER)
+                            && null != this.taskConfig.getConfiguration(CoreConstant.JOB_WRITER),
+                    "The [reader|writer] plugin parameters must not be empty!");
+
+            // resolve the taskId
+            this.taskId = this.taskConfig.getInt(CoreConstant.TASK_ID);
+            this.attemptCount = attemptCount;
+
+            /**
+             * Look up this taskExecutor's Communication by taskId.
+             * It is passed to the readerRunner and writerRunner, and to the channel for statistics.
+             */
+            this.taskCommunication = containerCommunicator
+                    .getCommunication(taskId);
+            Validate.notNull(this.taskCommunication,
+                    String.format("No Communication has been registered for taskId[%d]", taskId));
+            this.channel = ClassUtil.instantiate(channelClazz,
+                    Channel.class, configuration);
+            this.channel.setCommunication(this.taskCommunication);
+
+            /**
+             * Resolve the transformer parameters.
+             */
+
+            List<TransformerExecution> transformerInfoExecs = TransformerUtil.buildTransformerInfo(taskConfig);
+
+            /**
+             * Create the writerThread.
+             */
+            writerRunner = (WriterRunner) generateRunner(PluginType.WRITER);
+            this.writerThread = new Thread(writerRunner,
+                    String.format("%d-%d-%d-writer",
+                            jobId, taskGroupId, this.taskId));
+            // setting the thread's contextClassLoader gives the plugin a class loader separate from the main program's
+            this.writerThread.setContextClassLoader(LoadUtil.getJarLoader(
+                    PluginType.WRITER, this.taskConfig.getString(
+                            CoreConstant.JOB_WRITER_NAME)));
+
+            /**
+             * Create the readerThread.
+             */
+            readerRunner = (ReaderRunner) generateRunner(PluginType.READER, transformerInfoExecs);
+            this.readerThread = new Thread(readerRunner,
+                    String.format("%d-%d-%d-reader",
+                            jobId, taskGroupId, this.taskId));
+            /**
+             * Setting the thread's contextClassLoader gives the plugin a class loader separate from the main program's.
+             */
+            this.readerThread.setContextClassLoader(LoadUtil.getJarLoader(
+                    PluginType.READER, this.taskConfig.getString(
+                            CoreConstant.JOB_READER_NAME)));
+        }
+
+        public void doStart() {
+            this.writerThread.start();
+
+            // the reader has not started yet, so the writer cannot have finished already
+            if (!this.writerThread.isAlive() || this.taskCommunication.getState() == State.FAILED) {
+                throw DataXException.asDataXException(
+                        TRExcCode.RUNTIME_ERROR,
+                        this.taskCommunication.getThrowable());
+            }
+
+            this.readerThread.start();
+
+            // the reader may finish very quickly here
+            if (!this.readerThread.isAlive() && this.taskCommunication.getState() == State.FAILED) {
+                // the reader thread may die right after starting; in that case the exception must be thrown immediately
+                throw DataXException.asDataXException(
+                        TRExcCode.RUNTIME_ERROR,
+                        this.taskCommunication.getThrowable());
+            }
+
+        }
+
+
+        private AbstractRunner generateRunner(PluginType pluginType) {
+            return generateRunner(pluginType, null);
+        }
+
+        private AbstractRunner generateRunner(PluginType pluginType, List<TransformerExecution> transformerInfoExecs) {
+            AbstractRunner newRunner = null;
+            TaskPluginCollector pluginCollector;
+
+            switch (pluginType) {
+                case READER:
+                    newRunner = LoadUtil.loadPluginRunner(pluginType,
+                            this.taskConfig.getString(CoreConstant.JOB_READER_NAME));
+                    newRunner.setJobConf(this.taskConfig.getConfiguration(
+                            CoreConstant.JOB_READER_PARAMETER));
+
+                    pluginCollector = ClassUtil.instantiate(
+                            taskCollectorClass, AbstractTaskPluginCollector.class,
+                            configuration, this.taskCommunication,
+                            PluginType.READER);
+
+                    RecordSender recordSender;
+                    if (transformerInfoExecs != null && transformerInfoExecs.size() > 0) {
+                        recordSender = new BufferedRecordTransformerExchanger(taskGroupId, this.taskId, this.channel,this.taskCommunication ,pluginCollector, transformerInfoExecs);
+                    } else {
+                        recordSender = new BufferedRecordExchanger(this.channel, pluginCollector);
+                    }
+
+                    ((ReaderRunner) newRunner).setRecordSender(recordSender);
+
+                    /**
+                     * Set the taskPlugin's collector, used for dirty data handling and job/task communication.
+                     */
+                    newRunner.setTaskPluginCollector(pluginCollector);
+                    break;
+                case WRITER:
+                    newRunner = LoadUtil.loadPluginRunner(pluginType,
+                            this.taskConfig.getString(CoreConstant.JOB_WRITER_NAME));
+                    newRunner.setJobConf(this.taskConfig
+                            .getConfiguration(CoreConstant.JOB_WRITER_PARAMETER));
+
+                    pluginCollector = ClassUtil.instantiate(
+                            taskCollectorClass, AbstractTaskPluginCollector.class,
+                            configuration, this.taskCommunication,
+                            PluginType.WRITER);
+                    ((WriterRunner) newRunner).setRecordReceiver(new BufferedRecordExchanger(
+                            this.channel, pluginCollector));
+                    /**
+                     * Set the taskPlugin's collector, used for dirty data handling and job/task communication.
+                     */
+                    newRunner.setTaskPluginCollector(pluginCollector);
+                    break;
+                default:
+                    throw DataXException.asDataXException(FrameworkErrorCode.ARGUMENT_ERROR, "Can't generateRunner for: " + pluginType);
+            }
+
+            newRunner.setTaskGroupId(taskGroupId);
+            newRunner.setTaskId(this.taskId);
+            newRunner.setRunnerCommunication(this.taskCommunication);
+
+            return newRunner;
+        }
+
+        // check whether the task has finished
+        private boolean isTaskFinished() {
+            // if the reader or the writer is still working, the task is not finished
+            if (readerThread.isAlive() || writerThread.isAlive()) {
+                return false;
+            }
+
+            if(taskCommunication==null || !taskCommunication.isFinished()){
+        		return false;
+        	}
+
+            return true;
+        }
+        
+        private int getTaskId(){
+        	return taskId;
+        }
+
+        private long getTimeStamp(){
+            return taskCommunication.getTimestamp();
+        }
+
+        private int getAttemptCount(){
+            return attemptCount;
+        }
+        
+        private boolean supportFailOver(){
+        	return writerRunner.supportFailOver();
+        }
+
+        private void shutdown(){
+            writerRunner.shutdown();
+            readerRunner.shutdown();
+            if(writerThread.isAlive()){
+                writerThread.interrupt();
+            }
+            if(readerThread.isAlive()){
+                readerThread.interrupt();
+            }
+        }
+
+        private boolean isShutdown(){
+            return !readerThread.isAlive() && !writerThread.isAlive();
+        }
+    }
+}
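
The scheduling loop above revolves around a few timing knobs: sleepInterval (how often the loop wakes up), reportInterval (how often progress is pushed upward), and the failover pair taskRetryIntervalInMsec / taskMaxWaitInMsec that decides whether a failed task is re-queued, kept waiting, or given up on. Below is a self-contained sketch of just that failover decision (step 3 of the loop); the inputs are assumed to be known, whereas the real loop derives them from the failed executor.

    public class FailoverDecision {
        public enum Action { RETRY_NOW, KEEP_WAITING, FORCE_SHUTDOWN_AND_WAIT, GIVE_UP }

        // Mirrors the per-task branch in TaskGroupContainer.start():
        // wait out the retry interval, keep nudging a still-alive executor to shut down,
        // and fail the whole group once taskMaxWaitInMsec is exceeded.
        public static Action decide(long now, long failedAt, boolean oldExecutorShutdown,
                                    long retryIntervalMs, long maxWaitMs) {
            if (now - failedAt < retryIntervalMs) {
                return Action.KEEP_WAITING;
            }
            if (!oldExecutorShutdown) {
                return (now - failedAt > maxWaitMs) ? Action.GIVE_UP : Action.FORCE_SHUTDOWN_AND_WAIT;
            }
            return Action.RETRY_NOW;
        }

        public static void main(String[] args) {
            long failedAt = System.currentTimeMillis() - 15_000;   // failed 15 seconds ago
            System.out.println(decide(System.currentTimeMillis(), failedAt, true, 10_000, 60_000));
            // -> RETRY_NOW: the 10s retry interval has passed and the old executor is gone
        }
    }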

+ 114 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/taskgroup/TaskMonitor.java

@@ -0,0 +1,114 @@
+package cn.tr.plugin.dataX.core.taskgroup;
+
+import cn.tr.core.exception.TRExcCode;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.tr.plugin.dataX.core.job.meta.State;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+
+import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Created by liqiang on 15/7/23.
+ */
+public class TaskMonitor {
+
+    private static final Logger LOG = LoggerFactory.getLogger(TaskMonitor.class);
+    private static final TaskMonitor instance = new TaskMonitor();
+    private static final long EXPIRED_TIME = 172800 * 1000L;
+
+    private ConcurrentHashMap<Integer, TaskCommunication> tasks = new ConcurrentHashMap<Integer, TaskCommunication>();
+
+    private TaskMonitor() {
+    }
+
+    public static TaskMonitor getInstance() {
+        return instance;
+    }
+
+    public void registerTask(Integer taskid, Communication communication) {
+        // if the task has already finished, return immediately
+        if (communication.isFinished()) {
+            return;
+        }
+        tasks.putIfAbsent(taskid, new TaskCommunication(taskid, communication));
+    }
+
+    public void removeTask(Integer taskid) {
+        tasks.remove(taskid);
+    }
+
+    public void report(Integer taskid, Communication communication) {
+        // if the task has already finished, return immediately
+        if (communication.isFinished()) {
+            return;
+        }
+        if (!tasks.containsKey(taskid)) {
+            LOG.warn("unexpected: taskid({}) missed.", taskid);
+            tasks.putIfAbsent(taskid, new TaskCommunication(taskid, communication));
+        } else {
+            tasks.get(taskid).report(communication);
+        }
+    }
+
+    public TaskCommunication getTaskCommunication(Integer taskid) {
+        return tasks.get(taskid);
+    }
+
+
+    public static class TaskCommunication {
+        private Integer taskid;
+        // total read records seen in the most recent communication
+        private long lastAllReadRecords = -1;
+        // updated only on the first report or when the statistics change
+        private long lastUpdateComunicationTS;
+        private long ttl;
+
+        private TaskCommunication(Integer taskid, Communication communication) {
+            this.taskid = taskid;
+            lastAllReadRecords = CommunicationTool.getTotalReadRecords(communication);
+            ttl = System.currentTimeMillis();
+            lastUpdateComunicationTS = ttl;
+        }
+
+        public void report(Communication communication) {
+
+            ttl = System.currentTimeMillis();
+            // if the read count has grown, update the record; check this first, because the goal is detecting a stuck task, not strict expiry
+            if (CommunicationTool.getTotalReadRecords(communication) > lastAllReadRecords) {
+                lastAllReadRecords = CommunicationTool.getTotalReadRecords(communication);
+                lastUpdateComunicationTS = ttl;
+            } else if (isExpired(lastUpdateComunicationTS)) {
+                communication.setState(State.FAILED);
+                communication.setTimestamp(ttl);
+                communication.setThrowable(DataXException.asDataXException(TRExcCode.TASK_HUNG_EXPIRED,
+                        String.format("task(%s) hung expired [allReadRecord(%s), elapsed(%s)]", taskid, lastAllReadRecords, (ttl - lastUpdateComunicationTS))));
+            }
+
+
+        }
+
+        private boolean isExpired(long lastUpdateComunicationTS) {
+            return System.currentTimeMillis() - lastUpdateComunicationTS > EXPIRED_TIME;
+        }
+
+        public Integer getTaskid() {
+            return taskid;
+        }
+
+        public long getLastAllReadRecords() {
+            return lastAllReadRecords;
+        }
+
+        public long getLastUpdateComunicationTS() {
+            return lastUpdateComunicationTS;
+        }
+
+        public long getTtl() {
+            return ttl;
+        }
+    }
+}
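
TaskMonitor only treats a task as alive while its total read-record count keeps growing; once the count stalls for EXPIRED_TIME (48 hours), the next report() flips the shared Communication to FAILED with a hung-task error, which the task-group loop then handles like any other failure. A hedged usage sketch, assuming Communication and CommunicationTool behave as the other classes in this commit use them:

    import cn.tr.plugin.dataX.core.statistics.communication.Communication;
    import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;
    import cn.tr.plugin.dataX.core.taskgroup.TaskMonitor;

    public class TaskMonitorSketch {
        public static void main(String[] args) {
            TaskMonitor monitor = TaskMonitor.getInstance();
            Communication comm = new Communication();

            monitor.registerTask(1, comm);                                      // start watching task 1

            comm.increaseCounter(CommunicationTool.READ_SUCCEED_RECORDS, 100);  // progress happened...
            monitor.report(1, comm);                                            // ...so the "last progress" timestamp advances

            monitor.report(1, comm);                                            // no growth: only fatal after 48h of silence

            monitor.removeTask(1);                                              // remove once the task leaves runTasks
        }
    }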

+ 115 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/taskgroup/runner/AbstractRunner.java

@@ -0,0 +1,115 @@
+package cn.tr.plugin.dataX.core.taskgroup.runner;
+
+import cn.tr.plugin.dataX.common.plugin.AbstractTaskPlugin;
+import cn.tr.plugin.dataX.common.plugin.TaskPluginCollector;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.job.meta.State;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;
+import cn.tr.plugin.dataX.core.util.Validate;
+
+public abstract class AbstractRunner {
+    private AbstractTaskPlugin plugin;
+
+    private Configuration jobConf;
+
+    private Communication runnerCommunication;
+
+    private int taskGroupId;
+
+    private int taskId;
+
+    public AbstractRunner(AbstractTaskPlugin taskPlugin) {
+        this.plugin = taskPlugin;
+    }
+
+    public void destroy() {
+        if (this.plugin != null) {
+            this.plugin.destroy();
+        }
+    }
+
+    public State getRunnerState() {
+        return this.runnerCommunication.getState();
+    }
+
+    public AbstractTaskPlugin getPlugin() {
+        return plugin;
+    }
+
+    public void setPlugin(AbstractTaskPlugin plugin) {
+        this.plugin = plugin;
+    }
+
+    public Configuration getJobConf() {
+        return jobConf;
+    }
+
+    public void setJobConf(Configuration jobConf) {
+        this.jobConf = jobConf;
+        this.plugin.setPluginJobConf(jobConf);
+    }
+
+    public void setTaskPluginCollector(TaskPluginCollector pluginCollector) {
+        this.plugin.setTaskPluginCollector(pluginCollector);
+    }
+
+    private void mark(State state) {
+        this.runnerCommunication.setState(state);
+        if (state == State.SUCCEEDED) {
+            // increment the stage counter by 1
+            this.runnerCommunication.setLongCounter(CommunicationTool.STAGE,
+                    this.runnerCommunication.getLongCounter(CommunicationTool.STAGE) + 1);
+        }
+    }
+
+    public void markRun() {
+        mark(State.RUNNING);
+    }
+
+    public void markSuccess() {
+        mark(State.SUCCEEDED);
+    }
+
+    public void markFail(final Throwable throwable) {
+        mark(State.FAILED);
+        this.runnerCommunication.setTimestamp(System.currentTimeMillis());
+        this.runnerCommunication.setThrowable(throwable);
+    }
+
+    /**
+     * @param taskGroupId the taskGroupId to set
+     */
+    public void setTaskGroupId(int taskGroupId) {
+        this.taskGroupId = taskGroupId;
+        this.plugin.setTaskGroupId(taskGroupId);
+    }
+
+    /**
+     * @return the taskGroupId
+     */
+    public int getTaskGroupId() {
+        return taskGroupId;
+    }
+
+    public int getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(int taskId) {
+        this.taskId = taskId;
+        this.plugin.setTaskId(taskId);
+    }
+
+    public void setRunnerCommunication(final Communication runnerCommunication) {
+        Validate.notNull(runnerCommunication,
+                "The plugin's Communication must not be null");
+        this.runnerCommunication = runnerCommunication;
+    }
+
+    public Communication getRunnerCommunication() {
+        return runnerCommunication;
+    }
+
+    public abstract void shutdown();
+}
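
markRun/markSuccess/markFail are the only way a runner signals its lifecycle: they set the State on the shared Communication (plus the STAGE counter on success, and the throwable and timestamp on failure), and the container loop reads that state back. A minimal hedged sketch of a runner built on this base class; the framework is assumed to call setRunnerCommunication(...), setTaskId(...) and setJobConf(...) before run(), as TaskGroupContainer does.

    import cn.tr.plugin.dataX.common.plugin.AbstractTaskPlugin;
    import cn.tr.plugin.dataX.core.taskgroup.runner.AbstractRunner;

    public class NoOpRunner extends AbstractRunner implements Runnable {

        public NoOpRunner(AbstractTaskPlugin plugin) {
            super(plugin);
        }

        @Override
        public void run() {
            markRun();                   // state -> RUNNING on the shared Communication
            try {
                // ... real work would go here ...
                markSuccess();           // state -> SUCCEEDED, STAGE counter + 1
            } catch (Throwable t) {
                markFail(t);             // state -> FAILED, throwable and timestamp recorded
            } finally {
                destroy();               // lets the wrapped plugin clean up
            }
        }

        @Override
        public void shutdown() {
            // nothing to interrupt in this sketch
        }
    }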

+ 96 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/taskgroup/runner/ReaderRunner.java

@@ -0,0 +1,96 @@
+package cn.tr.plugin.dataX.core.taskgroup.runner;
+
+import cn.tr.plugin.dataX.common.plugin.AbstractTaskPlugin;
+import cn.tr.plugin.dataX.common.plugin.RecordSender;
+import cn.tr.plugin.dataX.common.spi.Reader;
+import cn.tr.plugin.dataX.common.statistics.PerfRecord;
+import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Created by jingxing on 14-9-1.
+ * <p/>
+ * Runs the reader side of a single task slice.
+ */
+public class ReaderRunner extends AbstractRunner implements Runnable {
+
+    private static final Logger LOG = LoggerFactory
+            .getLogger(ReaderRunner.class);
+
+    private RecordSender recordSender;
+
+    public void setRecordSender(RecordSender recordSender) {
+        this.recordSender = recordSender;
+    }
+
+    public ReaderRunner(AbstractTaskPlugin abstractTaskPlugin) {
+        super(abstractTaskPlugin);
+    }
+
+    @Override
+    public void run() {
+        assert null != this.recordSender;
+
+        Reader.Task taskReader = (Reader.Task) this.getPlugin();
+
+        // track waitWriterTime; ended only in the finally block
+        PerfRecord channelWaitWrite = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WAIT_WRITE_TIME);
+        try {
+            channelWaitWrite.start();
+
+            LOG.debug("task reader starts to do init ...");
+            PerfRecord initPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.READ_TASK_INIT);
+            initPerfRecord.start();
+            taskReader.init();
+            initPerfRecord.end();
+
+            LOG.debug("task reader starts to do prepare ...");
+            PerfRecord preparePerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.READ_TASK_PREPARE);
+            preparePerfRecord.start();
+            taskReader.prepare();
+            preparePerfRecord.end();
+
+            LOG.debug("task reader starts to read ...");
+            PerfRecord dataPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.READ_TASK_DATA);
+            dataPerfRecord.start();
+            taskReader.startRead(recordSender);
+            recordSender.terminate();
+
+            dataPerfRecord.addCount(CommunicationTool.getTotalReadRecords(super.getRunnerCommunication()));
+            dataPerfRecord.addSize(CommunicationTool.getTotalReadBytes(super.getRunnerCommunication()));
+            dataPerfRecord.end();
+
+            LOG.debug("task reader starts to do post ...");
+            PerfRecord postPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.READ_TASK_POST);
+            postPerfRecord.start();
+            taskReader.post();
+            postPerfRecord.end();
+            // automatic flush
+            // super.markSuccess(); must NOT be called here: success is marked by the writerRunner,
+            // otherwise the reader could be considered finished while the writer is still running (a serious bug)
+        } catch (Throwable e) {
+            LOG.error("Reader runner Received Exceptions:", e);
+            super.markFail(e);
+        } finally {
+            LOG.debug("task reader starts to do destroy ...");
+            PerfRecord desPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.READ_TASK_DESTROY);
+            desPerfRecord.start();
+            super.destroy();
+            desPerfRecord.end();
+
+            channelWaitWrite.end(super.getRunnerCommunication().getLongCounter(CommunicationTool.WAIT_WRITER_TIME));
+
+            long transformerUsedTime = super.getRunnerCommunication().getLongCounter(CommunicationTool.TRANSFORMER_USED_TIME);
+            if (transformerUsedTime > 0) {
+                PerfRecord transformerRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.TRANSFORMER_TIME);
+                transformerRecord.start();
+                transformerRecord.end(transformerUsedTime);
+            }
+        }
+    }
+
+    @Override
+    public void shutdown(){
+        recordSender.shutdown();
+    }
+}
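
ReaderRunner drives a Reader.Task through init, prepare, startRead and post, wrapping each phase in a PerfRecord, and deliberately never calls markSuccess(): only the writer finishing proves the data actually arrived. The sketch below assumes Reader.Task in cn.tr.plugin.dataX.common.spi follows the upstream DataX SPI (an abstract Task base class extending AbstractTaskPlugin with these lifecycle methods); this commit's imports suggest that shape but do not show it.

    import cn.tr.plugin.dataX.common.plugin.RecordSender;
    import cn.tr.plugin.dataX.common.spi.Reader;

    public class DemoReader {
        // Sketch only: the Reader.Job half and plugin registration are omitted.
        public static class Task extends Reader.Task {
            @Override
            public void init() {
                // read this task's slice of configuration (set earlier via setPluginJobConf)
            }

            @Override
            public void prepare() {
                // open connections or cursors here
            }

            @Override
            public void startRead(RecordSender recordSender) {
                // build Records and hand them to recordSender here;
                // ReaderRunner calls recordSender.terminate() itself once this returns
            }

            @Override
            public void post() {
                // optional cleanup that should still count as reading time
            }

            @Override
            public void destroy() {
                // close connections; invoked via AbstractRunner.destroy() in the finally block
            }
        }
    }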

+ 44 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/taskgroup/runner/TaskGroupContainerRunner.java

@@ -0,0 +1,44 @@
+package cn.tr.plugin.dataX.core.taskgroup.runner;
+
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.tr.plugin.dataX.core.job.meta.State;
+import cn.tr.plugin.dataX.core.taskgroup.TaskGroupContainer;
+import cn.tr.plugin.dataX.core.util.FrameworkErrorCode;
+
+public class TaskGroupContainerRunner implements Runnable {
+
+	private TaskGroupContainer taskGroupContainer;
+
+	private State state;
+
+	public TaskGroupContainerRunner(TaskGroupContainer taskGroup) {
+		this.taskGroupContainer = taskGroup;
+		this.state = State.SUCCEEDED;
+	}
+
+	@Override
+	public void run() {
+		try {
+            Thread.currentThread().setName(
+                    String.format("taskGroup-%d", this.taskGroupContainer.getTaskGroupId()));
+            this.taskGroupContainer.start();
+			this.state = State.SUCCEEDED;
+		} catch (Throwable e) {
+			this.state = State.FAILED;
+			throw DataXException.asDataXException(
+					FrameworkErrorCode.RUNTIME_ERROR, e);
+		}
+	}
+
+	public TaskGroupContainer getTaskGroupContainer() {
+		return taskGroupContainer;
+	}
+
+	public State getState() {
+		return state;
+	}
+
+	public void setState(State state) {
+		this.state = state;
+	}
+}

+ 91 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/taskgroup/runner/WriterRunner.java

@@ -0,0 +1,91 @@
+package cn.tr.plugin.dataX.core.taskgroup.runner;
+
+import cn.tr.plugin.dataX.common.plugin.AbstractTaskPlugin;
+import cn.tr.plugin.dataX.common.plugin.RecordReceiver;
+import cn.tr.plugin.dataX.common.spi.Writer;
+import cn.tr.plugin.dataX.common.statistics.PerfRecord;
+import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;
+import cn.tr.plugin.dataX.core.util.Validate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Created by jingxing on 14-9-1.
+ * <p/>
+ * Runs the writer side of a single task slice.
+ */
+public class WriterRunner extends AbstractRunner implements Runnable {
+
+    private static final Logger LOG = LoggerFactory
+            .getLogger(WriterRunner.class);
+
+    private RecordReceiver recordReceiver;
+
+    public void setRecordReceiver(RecordReceiver receiver) {
+        this.recordReceiver = receiver;
+    }
+
+    public WriterRunner(AbstractTaskPlugin abstractTaskPlugin) {
+        super(abstractTaskPlugin);
+    }
+
+    @Override
+    public void run() {
+        Validate.isTrue(this.recordReceiver != null);
+
+        Writer.Task taskWriter = (Writer.Task) this.getPlugin();
+        // track waitReadTime; ended in the finally block
+        PerfRecord channelWaitRead = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WAIT_READ_TIME);
+        try {
+            channelWaitRead.start();
+            LOG.debug("task writer starts to do init ...");
+            PerfRecord initPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WRITE_TASK_INIT);
+            initPerfRecord.start();
+            taskWriter.init();
+            initPerfRecord.end();
+
+            LOG.debug("task writer starts to do prepare ...");
+            PerfRecord preparePerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WRITE_TASK_PREPARE);
+            preparePerfRecord.start();
+            taskWriter.prepare();
+            preparePerfRecord.end();
+            LOG.debug("task writer starts to write ...");
+
+            PerfRecord dataPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WRITE_TASK_DATA);
+            dataPerfRecord.start();
+            taskWriter.startWrite(recordReceiver);
+
+            dataPerfRecord.addCount(CommunicationTool.getTotalReadRecords(super.getRunnerCommunication()));
+            dataPerfRecord.addSize(CommunicationTool.getTotalReadBytes(super.getRunnerCommunication()));
+            dataPerfRecord.end();
+
+            LOG.debug("task writer starts to do post ...");
+            PerfRecord postPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WRITE_TASK_POST);
+            postPerfRecord.start();
+            taskWriter.post();
+            postPerfRecord.end();
+
+            super.markSuccess();
+        } catch (Throwable e) {
+            LOG.error("Writer Runner Received Exceptions:", e);
+            super.markFail(e);
+        } finally {
+            LOG.debug("task writer starts to do destroy ...");
+            PerfRecord desPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WRITE_TASK_DESTROY);
+            desPerfRecord.start();
+            super.destroy();
+            desPerfRecord.end();
+            channelWaitRead.end(super.getRunnerCommunication().getLongCounter(CommunicationTool.WAIT_READER_TIME));
+        }
+    }
+    
+    public boolean supportFailOver(){
+    	Writer.Task taskWriter = (Writer.Task) this.getPlugin();
+    	return taskWriter.supportFailOver();
+    }
+
+    @Override
+    public void shutdown(){
+        recordReceiver.shutdown();
+    }
+}
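
WriterRunner is the mirror image (init, prepare, startWrite, post) and it is the side that calls markSuccess(), so a task only counts as done once every record has been consumed. The sketch assumes Writer.Task and RecordReceiver keep the upstream DataX shape, in particular a RecordReceiver.getFromReader() that returns null after the reader's terminate record; neither signature appears in this hunk, so treat the loop as illustrative.

    import cn.tr.plugin.dataX.common.element.Record;
    import cn.tr.plugin.dataX.common.plugin.RecordReceiver;
    import cn.tr.plugin.dataX.common.spi.Writer;

    public class DemoWriter {
        public static class Task extends Writer.Task {
            @Override
            public void init() {
                // read target settings from the configuration set via setPluginJobConf
            }

            @Override
            public void startWrite(RecordReceiver receiver) {
                Record record;
                // getFromReader() is assumed from upstream DataX: it blocks on the channel
                // and returns null once the reader has pushed its terminate record
                while ((record = receiver.getFromReader()) != null) {
                    // write the record; hand bad rows to the injected TaskPluginCollector
                    // instead of failing the whole task
                }
            }

            @Override
            public void destroy() {
                // flush and close the target connection
            }
        }
    }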

+ 248 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/transport/channel/Channel.java

@@ -0,0 +1,248 @@
+package cn.tr.plugin.dataX.core.transport.channel;
+
+import cn.tr.plugin.dataX.common.element.Record;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.statistics.communication.Communication;
+import cn.tr.plugin.dataX.core.statistics.communication.CommunicationTool;
+import cn.tr.plugin.dataX.core.transport.record.TerminateRecord;
+import cn.tr.plugin.dataX.core.util.Validate;
+import cn.tr.plugin.dataX.core.util.container.CoreConstant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collection;
+
+/**
+ * Created by jingxing on 14-8-25.
+ * <p/>
+ * Statistics and rate limiting both live here.
+ */
+public abstract class Channel {
+
+    private static final Logger LOG = LoggerFactory.getLogger(Channel.class);
+
+    protected int taskGroupId;
+
+    protected int capacity;
+
+    protected int byteCapacity;
+
+    protected long byteSpeed; // bps: bytes/s
+
+    protected long recordSpeed; // tps: records/s
+
+    protected long flowControlInterval;
+
+    protected volatile boolean isClosed = false;
+
+    protected Configuration configuration = null;
+
+    protected volatile long waitReaderTime = 0;
+
+    protected volatile long waitWriterTime = 0;
+
+    private static boolean isFirstPrint = true;
+
+    private Communication currentCommunication;
+
+    private Communication lastCommunication = new Communication();
+
+    public Channel(final Configuration configuration) {
+        // default record capacity of the channel queue; falls back to 2048 here if not configured
+        int capacity = configuration.getInt(
+                CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_CAPACITY, 2048);
+        long byteSpeed = configuration.getLong(
+                CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE, 1024 * 1024);
+        long recordSpeed = configuration.getLong(
+                CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD, 10000);
+
+        if (capacity <= 0) {
+            throw new IllegalArgumentException(String.format(
+                    "Channel capacity [%d] must be greater than 0.", capacity));
+        }
+
+        synchronized (Channel.class) {
+            if (isFirstPrint) {
+                Channel.LOG.info("Channel set byte_speed_limit to " + byteSpeed
+                        + (byteSpeed <= 0 ? ", No bps activated." : "."));
+                Channel.LOG.info("Channel set record_speed_limit to " + recordSpeed
+                        + (recordSpeed <= 0 ? ", No tps activated." : "."));
+                isFirstPrint = false;
+            }
+        }
+
+        this.taskGroupId = configuration.getInt(
+                CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID);
+        this.capacity = capacity;
+        this.byteSpeed = byteSpeed;
+        this.recordSpeed = recordSpeed;
+        this.flowControlInterval = configuration.getLong(
+                CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_FLOWCONTROLINTERVAL, 1000);
+        // default byte capacity of the channel queue is 8 MB (upstream DataX used 64 MB)
+        this.byteCapacity = configuration.getInt(
+                CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_CAPACITY_BYTE, 8 * 1024 * 1024);
+        this.configuration = configuration;
+    }
+
+    public void close() {
+        this.isClosed = true;
+    }
+
+    public void open() {
+        this.isClosed = false;
+    }
+
+    public boolean isClosed() {
+        return isClosed;
+    }
+
+    public int getTaskGroupId() {
+        return this.taskGroupId;
+    }
+
+    public int getCapacity() {
+        return capacity;
+    }
+
+    public long getByteSpeed() {
+        return byteSpeed;
+    }
+
+    public Configuration getConfiguration() {
+        return this.configuration;
+    }
+
+    public void setCommunication(final Communication communication) {
+        this.currentCommunication = communication;
+        this.lastCommunication.reset();
+    }
+
+    public void push(final Record r) {
+        Validate.notNull(r, "record不能为空.");
+        this.doPush(r);
+        this.statPush(1L, r.getByteSize());
+    }
+
+    public void pushTerminate(final TerminateRecord r) {
+        Validate.notNull(r, "record不能为空.");
+        this.doPush(r);
+
+//        // 对 stage + 1
+//        currentCommunication.setLongCounter(CommunicationTool.STAGE,
+//                currentCommunication.getLongCounter(CommunicationTool.STAGE) + 1);
+    }
+
+    public void pushAll(final Collection<Record> rs) {
+        Validate.notNull(rs);
+        Validate.noNullElements(rs);
+        this.doPushAll(rs);
+        this.statPush(rs.size(), this.getByteSize(rs));
+    }
+
+    public Record pull() {
+        Record record = this.doPull();
+        this.statPull(1L, record.getByteSize());
+        return record;
+    }
+
+    public void pullAll(final Collection<Record> rs) {
+        Validate.notNull(rs);
+        this.doPullAll(rs);
+        this.statPull(rs.size(), this.getByteSize(rs));
+    }
+
+    protected abstract void doPush(Record r);
+
+    protected abstract void doPushAll(Collection<Record> rs);
+
+    protected abstract Record doPull();
+
+    protected abstract void doPullAll(Collection<Record> rs);
+
+    public abstract int size();
+
+    public abstract boolean isEmpty();
+
+    public abstract void clear();
+
+    private long getByteSize(final Collection<Record> rs) {
+        long size = 0;
+        for (final Record each : rs) {
+            size += each.getByteSize();
+        }
+        return size;
+    }
+
+    private void statPush(long recordSize, long byteSize) {
+        currentCommunication.increaseCounter(CommunicationTool.READ_SUCCEED_RECORDS,
+                recordSize);
+        currentCommunication.increaseCounter(CommunicationTool.READ_SUCCEED_BYTES,
+                byteSize);
+        // recording the wait counters here (on push) is enough: a pull may currently be blocked,
+        // but its accumulated wait time is already readable at this point
+
+        currentCommunication.setLongCounter(CommunicationTool.WAIT_READER_TIME, waitReaderTime);
+        currentCommunication.setLongCounter(CommunicationTool.WAIT_WRITER_TIME, waitWriterTime);
+
+        boolean isChannelByteSpeedLimit = (this.byteSpeed > 0);
+        boolean isChannelRecordSpeedLimit = (this.recordSpeed > 0);
+        if (!isChannelByteSpeedLimit && !isChannelRecordSpeedLimit) {
+            return;
+        }
+
+        long lastTimestamp = lastCommunication.getTimestamp();
+        long nowTimestamp = System.currentTimeMillis();
+        long interval = nowTimestamp - lastTimestamp;
+        if (interval - this.flowControlInterval >= 0) {
+            long byteLimitSleepTime = 0;
+            long recordLimitSleepTime = 0;
+            if (isChannelByteSpeedLimit) {
+                long currentByteSpeed = (CommunicationTool.getTotalReadBytes(currentCommunication) -
+                        CommunicationTool.getTotalReadBytes(lastCommunication)) * 1000 / interval;
+                if (currentByteSpeed > this.byteSpeed) {
+                    // compute the sleep time implied by the byte limit
+                    byteLimitSleepTime = currentByteSpeed * interval / this.byteSpeed
+                            - interval;
+                }
+            }
+
+            if (isChannelRecordSpeedLimit) {
+                long currentRecordSpeed = (CommunicationTool.getTotalReadRecords(currentCommunication) -
+                        CommunicationTool.getTotalReadRecords(lastCommunication)) * 1000 / interval;
+                if (currentRecordSpeed > this.recordSpeed) {
+                    // compute the sleep time implied by the record limit
+                    recordLimitSleepTime = currentRecordSpeed * interval / this.recordSpeed
+                            - interval;
+                }
+            }
+
+            // sleep for the larger of the two computed times
+            long sleepTime = Math.max(byteLimitSleepTime, recordLimitSleepTime);
+            if (sleepTime > 0) {
+                try {
+                    Thread.sleep(sleepTime);
+                } catch (InterruptedException e) {
+                    Thread.currentThread().interrupt();
+                }
+            }
+
+            lastCommunication.setLongCounter(CommunicationTool.READ_SUCCEED_BYTES,
+                    currentCommunication.getLongCounter(CommunicationTool.READ_SUCCEED_BYTES));
+            lastCommunication.setLongCounter(CommunicationTool.READ_FAILED_BYTES,
+                    currentCommunication.getLongCounter(CommunicationTool.READ_FAILED_BYTES));
+            lastCommunication.setLongCounter(CommunicationTool.READ_SUCCEED_RECORDS,
+                    currentCommunication.getLongCounter(CommunicationTool.READ_SUCCEED_RECORDS));
+            lastCommunication.setLongCounter(CommunicationTool.READ_FAILED_RECORDS,
+                    currentCommunication.getLongCounter(CommunicationTool.READ_FAILED_RECORDS));
+            lastCommunication.setTimestamp(nowTimestamp);
+        }
+    }
+
+    private void statPull(long recordSize, long byteSize) {
+        currentCommunication.increaseCounter(
+                CommunicationTool.WRITE_RECEIVED_RECORDS, recordSize);
+        currentCommunication.increaseCounter(
+                CommunicationTool.WRITE_RECEIVED_BYTES, byteSize);
+    }
+
+}
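
A note on the throttling arithmetic above: statPush compares the speed measured over the last flowControlInterval with the configured byteSpeed/recordSpeed limits and, when the channel is running too fast, sleeps for exactly the extra time that would have kept the average under the limit. A minimal standalone sketch of that arithmetic (class name and numbers are invented for illustration; only the formula mirrors the code):

    public class ThrottleMath {
        public static void main(String[] args) {
            long byteSpeed = 1L << 20;           // configured limit: 1 MiB/s
            long interval = 1000L;               // flowControlInterval, in ms
            long currentByteSpeed = 4L << 20;    // measured speed: 4 MiB/s
            // Same formula as Channel.statPush: the interval would have needed
            // to be currentByteSpeed * interval / byteSpeed ms long for the
            // average to stay under the limit, so sleep the difference.
            long sleepMs = currentByteSpeed * interval / byteSpeed - interval;
            System.out.println("sleep for " + sleepMs + " ms");  // prints 3000
        }
    }

With these example numbers, 4 MiB pushed in 1 s is followed by a 3 s sleep, so the sustained rate over the 4 s window comes back to roughly the 1 MiB/s limit.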

+ 145 - 0
tr-plugins/tr-spring-boot-starter-plugin-dataX/src/main/java/cn/tr/plugin/dataX/core/transport/channel/memory/MemoryChannel.java

@@ -0,0 +1,145 @@
+package cn.tr.plugin.dataX.core.transport.channel.memory;
+
+import cn.tr.plugin.dataX.common.element.Record;
+import cn.tr.plugin.dataX.common.exception.DataXException;
+import cn.tr.plugin.dataX.common.util.Configuration;
+import cn.tr.plugin.dataX.core.transport.channel.Channel;
+import cn.tr.plugin.dataX.core.transport.record.TerminateRecord;
+import cn.tr.plugin.dataX.core.util.FrameworkErrorCode;
+import cn.tr.plugin.dataX.core.util.container.CoreConstant;
+import java.util.Collection;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * In-memory Channel implementation, backed by an ArrayBlockingQueue.
+ *
+ */
+public class MemoryChannel extends Channel {
+
+	private int bufferSize = 0;
+
+	private AtomicInteger memoryBytes = new AtomicInteger(0);
+
+	private ArrayBlockingQueue<Record> queue = null;
+
+	private ReentrantLock lock;
+
+	private Condition notInsufficient, notEmpty;
+
+	public MemoryChannel(final Configuration configuration) {
+		super(configuration);
+		this.queue = new ArrayBlockingQueue<Record>(this.getCapacity());
+		this.bufferSize = configuration.getInt(CoreConstant.DATAX_CORE_TRANSPORT_EXCHANGER_BUFFERSIZE);
+
+		lock = new ReentrantLock();
+		notInsufficient = lock.newCondition();
+		notEmpty = lock.newCondition();
+	}
+
+	@Override
+	public void close() {
+		super.close();
+		try {
+			this.queue.put(TerminateRecord.get());
+		} catch (InterruptedException ex) {
+			Thread.currentThread().interrupt();
+		}
+	}
+
+	@Override
+	public void clear(){
+		this.queue.clear();
+	}
+
+	@Override
+	protected void doPush(Record r) {
+		try {
+			long startTime = System.nanoTime();
+			this.queue.put(r);
+			waitWriterTime += System.nanoTime() - startTime;
+            memoryBytes.addAndGet(r.getMemorySize());
+		} catch (InterruptedException ex) {
+			Thread.currentThread().interrupt();
+		}
+	}
+
+	@Override
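+	/**
+	 * Blocks until the whole batch fits both the byte budget (byteCapacity)
+	 * and the remaining queue capacity, then enqueues it and wakes up waiting
+	 * consumers.
+	 */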
+	protected void doPushAll(Collection<Record> rs) {
+		try {
+			long startTime = System.nanoTime();
+			lock.lockInterruptibly();
+			int bytes = getRecordBytes(rs);
+			while (memoryBytes.get() + bytes > this.byteCapacity || rs.size() > this.queue.remainingCapacity()) {
+				notInsufficient.await(200L, TimeUnit.MILLISECONDS);
+            }
+			this.queue.addAll(rs);
+			waitWriterTime += System.nanoTime() - startTime;
+			memoryBytes.addAndGet(bytes);
+			notEmpty.signalAll();
+		} catch (InterruptedException e) {
+			throw DataXException.asDataXException(
+					FrameworkErrorCode.RUNTIME_ERROR, e);
+		} finally {
+			lock.unlock();
+		}
+	}
+
+	@Override
+	protected Record doPull() {
+		try {
+			long startTime = System.nanoTime();
+			Record r = this.queue.take();
+			waitReaderTime += System.nanoTime() - startTime;
+			memoryBytes.addAndGet(-r.getMemorySize());
+			return r;
+		} catch (InterruptedException e) {
+			Thread.currentThread().interrupt();
+			throw new IllegalStateException(e);
+		}
+	}
+
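+	/**
+	 * Drains up to bufferSize records into rs, waiting until at least one is
+	 * available, then releases their byte budget and wakes up blocked
+	 * producers.
+	 */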
+	@Override
+	protected void doPullAll(Collection<Record> rs) {
+		assert rs != null;
+		rs.clear();
+		try {
+			long startTime = System.nanoTime();
+			lock.lockInterruptibly();
+			while (this.queue.drainTo(rs, bufferSize) <= 0) {
+				notEmpty.await(200L, TimeUnit.MILLISECONDS);
+			}
+			waitReaderTime += System.nanoTime() - startTime;
+			int bytes = getRecordBytes(rs);
+			memoryBytes.addAndGet(-bytes);
+			notInsufficient.signalAll();
+		} catch (InterruptedException e) {
+			throw DataXException.asDataXException(
+					FrameworkErrorCode.RUNTIME_ERROR, e);
+		} finally {
+			lock.unlock();
+		}
+	}
+
+	private int getRecordBytes(Collection<Record> rs){
+		int bytes = 0;
+		for(Record r : rs){
+			bytes += r.getMemorySize();
+		}
+		return bytes;
+	}
+
+	@Override
+	public int size() {
+		return this.queue.size();
+	}
+
+	@Override
+	public boolean isEmpty() {
+		return this.queue.isEmpty();
+	}
+
+}
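
The doPushAll/doPullAll pair above implements byte-budget backpressure on top of the queue: producers wait on notInsufficient while the in-flight bytes would exceed byteCapacity, and consumers give those bytes back and signal. Below is the same lock/two-condition pattern in isolation, as a self-contained sketch; it is not the project's API, and the class and method names are invented for illustration:

    import java.util.ArrayDeque;
    import java.util.Queue;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.locks.Condition;
    import java.util.concurrent.locks.ReentrantLock;

    /** Standalone illustration of the byte-budget backpressure used by MemoryChannel. */
    public class ByteBudgetQueue {
        private final Queue<byte[]> queue = new ArrayDeque<>();
        private final long byteCapacity;      // analogous to Channel.byteCapacity
        private long memoryBytes = 0;         // bytes currently buffered
        private final ReentrantLock lock = new ReentrantLock();
        private final Condition notInsufficient = lock.newCondition(); // room freed
        private final Condition notEmpty = lock.newCondition();        // data arrived

        public ByteBudgetQueue(long byteCapacity) {
            this.byteCapacity = byteCapacity;
        }

        /** Mirrors doPushAll: wait until the chunk fits under the byte budget. */
        public void put(byte[] chunk) throws InterruptedException {
            lock.lockInterruptibly();
            try {
                while (memoryBytes + chunk.length > byteCapacity) {
                    notInsufficient.await(200L, TimeUnit.MILLISECONDS);
                }
                queue.add(chunk);
                memoryBytes += chunk.length;
                notEmpty.signalAll();
            } finally {
                lock.unlock();
            }
        }

        /** Mirrors doPullAll: wait for data, then hand the freed bytes back. */
        public byte[] take() throws InterruptedException {
            lock.lockInterruptibly();
            try {
                while (queue.isEmpty()) {
                    notEmpty.await(200L, TimeUnit.MILLISECONDS);
                }
                byte[] chunk = queue.poll();
                memoryBytes -= chunk.length;
                notInsufficient.signalAll();
                return chunk;
            } finally {
                lock.unlock();
            }
        }
    }

Unlike MemoryChannel, which pairs an ArrayBlockingQueue with an AtomicInteger byte counter, this sketch keeps everything under a single lock; the blocking behaviour seen by producers and consumers is the same idea.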

Some files were not shown because too many files changed in this diff