diff --git a/README.md b/README.md index a4a9996711..ee3f5db640 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,8 @@ ![license](https://img.shields.io/github/license/alibaba/canal.svg) [![average time to resolve an issue](http://isitmaintained.com/badge/resolution/alibaba/canal.svg)](http://isitmaintained.com/project/alibaba/canal "average time to resolve an issue") [![percentage of issues still open](http://isitmaintained.com/badge/open/alibaba/canal.svg)](http://isitmaintained.com/project/alibaba/canal "percentage of issues still open") +[![Leaderboard](https://img.shields.io/badge/Canal-%E6%9F%A5%E7%9C%8B%E8%B4%A1%E7%8C%AE%E6%8E%92%E8%A1%8C%E6%A6%9C-orange)](https://opensource.alibaba.com/contribution_leaderboard/details?projectValue=canal) + ## 简介 @@ -107,6 +109,7 @@ canal 特别设计了 client-server 模式,交互协议使用 protobuf 3.0 , c - canal php客户端: [https://github.com/xingwenge/canal-php](https://github.com/xingwenge/canal-php) - canal Python客户端:[https://github.com/haozi3156666/canal-python](https://github.com/haozi3156666/canal-python) - canal Rust客户端:[https://github.com/laohanlinux/canal-rs](https://github.com/laohanlinux/canal-rs) +- canal Nodejs客户端:[https://github.com/marmot-z/canal-nodejs](https://github.com/marmot-z/canal-nodejs) canal 作为 MySQL binlog 增量获取和解析工具,可将变更记录投递到 MQ 系统中,比如 Kafka/RocketMQ,可以借助于 MQ 的多语言能力 @@ -122,3 +125,5 @@ canal 作为 MySQL binlog 增量获取和解析工具,可将变更记录投递 ## 问题反馈 - 报告 issue: [github issues](https://github.com/alibaba/canal/issues) + +本项目的Issues会被同步沉淀至[阿里云开发者社区](https://developer.aliyun.com/ask) diff --git a/admin/admin-ui/pom.xml b/admin/admin-ui/pom.xml index eaa81c11b8..fcd9f5abfb 100644 --- a/admin/admin-ui/pom.xml +++ b/admin/admin-ui/pom.xml @@ -3,7 +3,7 @@ canal.admin com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT 4.0.0 diff --git a/admin/admin-ui/src/views/canalServer/CanalInstanceUpdate.vue b/admin/admin-ui/src/views/canalServer/CanalInstanceUpdate.vue index cdb168f2fb..814ff44c5b 100644 --- 
a/admin/admin-ui/src/views/canalServer/CanalInstanceUpdate.vue +++ b/admin/admin-ui/src/views/canalServer/CanalInstanceUpdate.vue @@ -59,7 +59,7 @@ export default { canalInstanceDetail(this.$route.query.id).then(response => { const data = response.data this.form.id = data.id - this.form.name = data.name + '/instance.propertios' + this.form.name = data.name + '/instance.properties' this.form.content = data.content this.form.clusterServerId = data.clusterServerId }) diff --git a/admin/admin-ui/src/views/login/index.vue b/admin/admin-ui/src/views/login/index.vue index 1a81f8f81c..d500ebbfcf 100644 --- a/admin/admin-ui/src/views/login/index.vue +++ b/admin/admin-ui/src/views/login/index.vue @@ -66,8 +66,8 @@ export default { } } const validatePassword = (rule, value, callback) => { - if (value.length < 6) { - callback(new Error('The password can not be less than 6 digits')) + if (value.length < 5) { + callback(new Error('The password can not be less than 5 digits')) } else { callback() } diff --git a/admin/admin-web/pom.xml b/admin/admin-web/pom.xml index f9ebed8a03..7c556676b8 100644 --- a/admin/admin-web/pom.xml +++ b/admin/admin-web/pom.xml @@ -3,7 +3,7 @@ canal.admin com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT 4.0.0 canal.admin-web @@ -30,6 +30,8 @@ mysql mysql-connector-java + + 8.0.33 com.github.ben-manes.caffeine diff --git a/admin/admin-web/src/main/bin/startup.sh b/admin/admin-web/src/main/bin/startup.sh index ec18c408ba..3e5bfa6d42 100644 --- a/admin/admin-web/src/main/bin/startup.sh +++ b/admin/admin-web/src/main/bin/startup.sh @@ -56,14 +56,29 @@ in exit;; esac +JavaVersion=`$JAVA -version 2>&1 |awk 'NR==1{ gsub(/"/,""); print $3 }' | awk -F '.' 
'{print $1}'` str=`file -L $JAVA | grep 64-bit` + +JAVA_OPTS="$JAVA_OPTS -Xss1m -XX:+AggressiveOpts -XX:-UseBiasedLocking -XX:-OmitStackTraceInFastThrow -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=$base/logs" +if [ $JavaVersion -ge 11 ] ; then + #JAVA_OPTS="$JAVA_OPTS -Xlog:gc*:$base_log/gc.log:time " + JAVA_OPTS="$JAVA_OPTS" +else + #JAVA_OPTS="$JAVA_OPTS -Xloggc:$base/logs/canal/gc.log -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCApplicationStoppedTime" + JAVA_OPTS="$JAVA_OPTS -XX:+UseFastAccessorMethods -XX:+PrintAdaptiveSizePolicy -XX:+PrintTenuringDistribution" +fi + if [ -n "$str" ]; then - JAVA_OPTS="-server -Xms2048m -Xmx3072m" + if [ $JavaVersion -ge 11 ] ; then + # For G1 + JAVA_OPTS="-server -Xms2g -Xmx3g -XX:+UseG1GC -XX:MaxGCPauseMillis=250 -XX:+UseGCOverheadLimit -XX:+ExplicitGCInvokesConcurrent $JAVA_OPTS" + else + JAVA_OPTS="-server -Xms2g -Xmx3g -Xmn1g -XX:SurvivorRatio=2 -XX:PermSize=96m -XX:MaxPermSize=256m -XX:MaxTenuringThreshold=15 -XX:+DisableExplicitGC $JAVA_OPTS" + fi else - JAVA_OPTS="-server -Xms1024m -Xmx1024m" + JAVA_OPTS="-server -Xms1024m -Xmx1024m -XX:NewSize=256m -XX:MaxNewSize=256m -XX:MaxPermSize=128m $JAVA_OPTS" fi -JAVA_OPTS="$JAVA_OPTS -XX:+UseG1GC -XX:MaxGCPauseMillis=250 -XX:+UseGCOverheadLimit -XX:+ExplicitGCInvokesConcurrent -XX:+PrintAdaptiveSizePolicy -XX:+PrintTenuringDistribution" JAVA_OPTS=" $JAVA_OPTS -Djava.awt.headless=true -Djava.net.preferIPv4Stack=true -Dfile.encoding=UTF-8" CANAL_OPTS="-DappName=canal-admin" @@ -81,4 +96,4 @@ $JAVA $JAVA_OPTS $JAVA_DEBUG_OPT $CANAL_OPTS -classpath .:$CLASSPATH com.alibaba echo $! 
> $base/bin/admin.pid echo "cd to $current_path for continue" -cd $current_path \ No newline at end of file +cd $current_path diff --git a/admin/admin-web/src/main/java/com/alibaba/otter/canal/admin/connector/SimpleAdminConnectors.java b/admin/admin-web/src/main/java/com/alibaba/otter/canal/admin/connector/SimpleAdminConnectors.java index bf9969542b..753e1b11d7 100644 --- a/admin/admin-web/src/main/java/com/alibaba/otter/canal/admin/connector/SimpleAdminConnectors.java +++ b/admin/admin-web/src/main/java/com/alibaba/otter/canal/admin/connector/SimpleAdminConnectors.java @@ -2,6 +2,7 @@ import java.util.function.Function; +import org.assertj.core.util.Strings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.core.env.Environment; @@ -26,7 +27,8 @@ public static R execute(String ip, int port, String user, String passwd, Fun connector.connect(); return function.apply(connector); } catch (Exception e) { - logger.error("connect to ip:{},port:{},user:{},password:{}, failed", ip, port, user, passwd); + logger.error("connect to ip:{},port:{},user:{},password:{}, failed", + ip, port, user, getDesensitizationPassword(passwd)); logger.error(e.getMessage()); } finally { connector.disconnect(); @@ -34,4 +36,14 @@ public static R execute(String ip, int port, String user, String passwd, Fun return null; } + + private static String getDesensitizationPassword(String password) { + String defaultPassword = "*****"; + + if (Strings.isNullOrEmpty(password) || password.length() < 5) { + return defaultPassword; + } else { + return String.format("%s*****", password.substring(0, 3)); + } + } } diff --git a/admin/admin-web/src/main/java/com/alibaba/otter/canal/admin/controller/CanalClusterController.java b/admin/admin-web/src/main/java/com/alibaba/otter/canal/admin/controller/CanalClusterController.java index a6b4d02521..e1a228bf6d 100644 --- a/admin/admin-web/src/main/java/com/alibaba/otter/canal/admin/controller/CanalClusterController.java +++ 
b/admin/admin-web/src/main/java/com/alibaba/otter/canal/admin/controller/CanalClusterController.java @@ -13,8 +13,8 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; -import com.alibaba.fastjson.JSONArray; -import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson2.JSONArray; +import com.alibaba.fastjson2.JSONObject; import com.alibaba.otter.canal.admin.model.BaseModel; import com.alibaba.otter.canal.admin.model.CanalCluster; import com.alibaba.otter.canal.admin.model.NodeServer; diff --git a/admin/admin-web/src/main/java/com/alibaba/otter/canal/admin/service/impl/UserServiceImpl.java b/admin/admin-web/src/main/java/com/alibaba/otter/canal/admin/service/impl/UserServiceImpl.java index bb23eeffeb..5aaf554379 100644 --- a/admin/admin-web/src/main/java/com/alibaba/otter/canal/admin/service/impl/UserServiceImpl.java +++ b/admin/admin-web/src/main/java/com/alibaba/otter/canal/admin/service/impl/UserServiceImpl.java @@ -21,6 +21,8 @@ public class UserServiceImpl implements UserService { private static byte[] seeds = "canal is best!".getBytes(); + private static final Integer PASSWORD_LENGTH = 5; + public User find4Login(String username, String password) { if (StringUtils.isEmpty(username) || StringUtils.isEmpty(password)) { return null; @@ -43,6 +45,9 @@ public User find4Login(String username, String password) { } public void update(User user) { + if (user.getPassword().length() < PASSWORD_LENGTH) { + throw new ServiceException("The new password is too short, must be at least 5 digits"); + } User userTmp = User.find.query().where().eq("username", user.getUsername()).findOne(); if (userTmp == null) { throw new ServiceException(); diff --git a/admin/admin-web/src/main/resources/canal-template.properties b/admin/admin-web/src/main/resources/canal-template.properties index 9655f1e242..477bb9573e 100644 --- 
b/admin/admin-web/src/main/resources/canal-template.properties @@ -25,7 +25,7 @@ canal.zkServers = # flush data to zk canal.zookeeper.flush.period = 1000 canal.withoutNetty = false -# tcp, kafka, rocketMQ, rabbitMQ +# tcp, kafka, rocketMQ, rabbitMQ, pulsarMQ canal.serverMode = tcp # flush meta cursor/parse position to file canal.file.data.dir = ${canal.conf.dir} @@ -96,7 +96,7 @@ canal.instance.tsdb.snapshot.expire = 360 ################################################# ######### destinations ############# ################################################# -canal.destinations = +canal.destinations = # conf root dir canal.conf.dir = ../conf # auto scan instance dir add/remove and start/stop instance @@ -148,8 +148,13 @@ kafka.max.in.flight.requests.per.connection = 1 kafka.retries = 0 kafka.kerberos.enable = false -kafka.kerberos.krb5.file = "../conf/kerberos/krb5.conf" -kafka.kerberos.jaas.file = "../conf/kerberos/jaas.conf" +kafka.kerberos.krb5.file = ../conf/kerberos/krb5.conf +kafka.kerberos.jaas.file = ../conf/kerberos/jaas.conf + +# sasl demo +# kafka.sasl.jaas.config = org.apache.kafka.common.security.scram.ScramLoginModule required \\n username=\"alice\" \\npassword="alice-secret\"; +# kafka.sasl.mechanism = SCRAM-SHA-512 +# kafka.security.protocol = SASL_PLAINTEXT ################################################## ######### RocketMQ ############# @@ -161,7 +166,7 @@ rocketmq.namespace = rocketmq.namesrv.addr = 127.0.0.1:9876 rocketmq.retry.times.when.send.failed = 0 rocketmq.vip.channel.enabled = false -rocketmq.tag = +rocketmq.tag = ################################################## ######### RabbitMQ ############# @@ -171,4 +176,12 @@ rabbitmq.virtual.host = rabbitmq.exchange = rabbitmq.username = rabbitmq.password = -rabbitmq.deliveryMode = \ No newline at end of file +rabbitmq.deliveryMode = + + +################################################## +######### Pulsar ############# +################################################## +pulsarmq.serverUrl = 
+pulsarmq.roleToken = +pulsarmq.topicTenantPrefix = diff --git a/admin/pom.xml b/admin/pom.xml index e1404b582d..94469ab43e 100644 --- a/admin/pom.xml +++ b/admin/pom.xml @@ -1,9 +1,15 @@ + + com.alibaba.otter + canal + 1.1.8-SNAPSHOT + ../pom.xml + 4.0.0 com.alibaba.otter canal.admin - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT pom canal admin module for otter ${project.version} @@ -25,67 +31,20 @@ org.springframework.cloud spring-cloud-context - 2.0.0.RELEASE + 3.0.6 - - org.springframework - spring-core - 5.0.5.RELEASE - - - org.springframework - spring-aop - 5.0.5.RELEASE - - - org.springframework - spring-context - 5.0.5.RELEASE - - - org.springframework - spring-jdbc - 5.0.5.RELEASE - - - org.springframework - spring-orm - 5.0.5.RELEASE - - - org.springframework - spring-test - 5.0.5.RELEASE - test - - org.springframework.boot spring-boot-dependencies - 2.0.1.RELEASE + 2.5.4 pom import - - mysql - mysql-connector-java - 5.1.48 - com.github.ben-manes.caffeine caffeine 2.6.2 - - commons-beanutils - commons-beanutils - 1.8.2 - - - commons-lang - commons-lang - 2.6 - io.ebean ebean diff --git a/client-adapter/common/pom.xml b/client-adapter/common/pom.xml index acc34e9d5f..b1b4eaf0da 100644 --- a/client-adapter/common/pom.xml +++ b/client-adapter/common/pom.xml @@ -3,7 +3,7 @@ canal.client-adapter com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml 4.0.0 @@ -23,29 +23,8 @@ ${project.version} - org.springframework - spring-core - - - org.springframework - spring-aop - - - org.springframework - spring-context - - - org.springframework - spring-jdbc - - - org.springframework - spring-orm - - - org.springframework - spring-test - test + org.springframework.boot + spring-boot joda-time @@ -55,10 +34,6 @@ com.alibaba druid - - org.yaml - snakeyaml - org.powermock powermock-api-mockito @@ -79,11 +54,15 @@ junit test + + org.yaml + snakeyaml + test + com.diffblue deeptestutils test - diff --git 
a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/ProxyOuterAdapter.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/ProxyOuterAdapter.java new file mode 100644 index 0000000000..1c8d27c495 --- /dev/null +++ b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/ProxyOuterAdapter.java @@ -0,0 +1,87 @@ +package com.alibaba.otter.canal.client.adapter; + +import com.alibaba.otter.canal.client.adapter.support.Dml; +import com.alibaba.otter.canal.client.adapter.support.EtlResult; +import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +public class ProxyOuterAdapter implements OuterAdapter { + + private OuterAdapter outerAdapter; + + public ProxyOuterAdapter(OuterAdapter outerAdapter) { + this.outerAdapter = outerAdapter; + } + + private ClassLoader changeCL() { + ClassLoader cl = Thread.currentThread().getContextClassLoader(); + Thread.currentThread().setContextClassLoader(outerAdapter.getClass().getClassLoader()); + return cl; + } + + private void revertCL(ClassLoader cl) { + Thread.currentThread().setContextClassLoader(cl); + } + + @Override + public void init(OuterAdapterConfig configuration, Properties envProperties) { + ClassLoader cl = changeCL(); + try { + outerAdapter.init(configuration, envProperties); + } finally { + revertCL(cl); + } + } + + @Override + public void sync(List dmls) { + ClassLoader cl = changeCL(); + try { + outerAdapter.sync(dmls); + } finally { + revertCL(cl); + } + } + + @Override + public void destroy() { + ClassLoader cl = changeCL(); + try { + outerAdapter.destroy(); + } finally { + revertCL(cl); + } + } + + @Override + public EtlResult etl(String task, List params) { + ClassLoader cl = changeCL(); + try { + return OuterAdapter.super.etl(task, params); + } finally { + revertCL(cl); + } + } + + @Override + public Map count(String task) { + ClassLoader cl = 
changeCL(); + try { + return OuterAdapter.super.count(task); + } finally { + revertCL(cl); + } + } + + @Override + public String getDestination(String task) { + ClassLoader cl = changeCL(); + try { + return OuterAdapter.super.getDestination(task); + } finally { + revertCL(cl); + } + } +} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/YmlConfigBinder.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/YmlConfigBinder.java deleted file mode 100644 index 59a48d6101..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/YmlConfigBinder.java +++ /dev/null @@ -1,155 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config; - -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Properties; - -import org.springframework.util.PropertyPlaceholderHelper; -import org.springframework.util.StringUtils; - -import com.alibaba.otter.canal.client.adapter.config.bind.PropertiesConfigurationFactory; -import com.alibaba.otter.canal.client.adapter.config.common.*; - -/** - * 将yaml内容绑定到指定对象, 遵循spring yml的绑定规范 - * - * @author reweerma 2019-2-1 上午9:14:02 - * @version 1.0.0 - */ -public class YmlConfigBinder { - - /** - * 将当前内容绑定到指定对象 - * - * @param content yml内容 - * @param clazz 指定对象类型 - * @return 对象 - */ - public static T bindYmlToObj(String content, Class clazz) { - return bindYmlToObj(null, content, clazz, null); - } - - /** - * 将当前内容绑定到指定对象并指定内容编码格式 - * - * @param content yml内容 - * @param clazz 指定对象类型 - * @param charset yml内容编码格式 - * @return 对象 - */ - public static T bindYmlToObj(String content, Class clazz, String charset) { - return bindYmlToObj(null, content, clazz, charset); - } - - /** - * 将当前内容指定前缀部分绑定到指定对象 - * - * @param prefix 指定前缀 - * @param content yml内容 - * @param clazz 指定对象类型 - * @return 对象 - */ - public static T bindYmlToObj(String prefix, String content, Class clazz) { - return bindYmlToObj(prefix, content, clazz, 
null); - } - - /** - * 将当前内容指定前缀部分绑定到指定对象并指定内容编码格式 - * - * @param prefix 指定前缀 - * @param content yml内容 - * @param clazz 指定对象类型 - * @param charset yml内容编码格式 - * @return 对象 - */ - public static T bindYmlToObj(String prefix, String content, Class clazz, String charset) { - return bindYmlToObj(prefix, content, clazz, charset, null); - } - - /** - * 将当前内容指定前缀部分绑定到指定对象并用环境变量中的属性替换占位符, 例: 当前内容有属性 zkServers: ${zookeeper.servers} - * 在envProperties中有属性 zookeeper.servers: - * 192.168.0.1:2181,192.168.0.1:2181,192.168.0.1:2181 则当前内容 zkServers 会被替换为 - * zkServers: 192.168.0.1:2181,192.168.0.1:2181,192.168.0.1:2181 注: 假设绑定的类中 - * zkServers 属性是 List 对象, 则会自动映射成List - * - * @param prefix 指定前缀 - * @param content yml内容 - * @param clazz 指定对象类型 - * @param charset yml内容编码格式 - * @return 对象 - */ - public static T bindYmlToObj(String prefix, String content, Class clazz, String charset, - Properties baseProperties) { - try { - byte[] contentBytes; - if (charset == null) { - contentBytes = content.getBytes("UTF-8"); - } else { - contentBytes = content.getBytes(charset); - } - YamlPropertySourceLoader propertySourceLoader = new YamlPropertySourceLoader(); - Resource configResource = new ByteArrayResource(contentBytes); - PropertySource propertySource = propertySourceLoader.load("manualBindConfig", configResource, null); - - if (propertySource == null) { - return null; - } - - Properties properties = new Properties(); - Map propertiesRes = new LinkedHashMap<>(); - if (!StringUtils.isEmpty(prefix) && !prefix.endsWith(".")) { - prefix = prefix + "."; - } - - properties.putAll((Map) propertySource.getSource()); - - if (baseProperties != null) { - baseProperties.putAll(properties); - properties = baseProperties; - } - - for (Map.Entry entry : ((Map) propertySource.getSource()).entrySet()) { - String key = (String) entry.getKey(); - Object value = entry.getValue(); - - if (prefix != null) { - if (key != null && key.startsWith(prefix)) { - key = key.substring(prefix.length()); - } else { - 
continue; - } - } - - if (value instanceof String && ((String) value).contains("${") && ((String) value).contains("}")) { - PropertyPlaceholderHelper propertyPlaceholderHelper = new PropertyPlaceholderHelper("${", "}"); - value = propertyPlaceholderHelper.replacePlaceholders((String) value, properties); - } - - propertiesRes.put(key, value); - } - - if (propertiesRes.isEmpty()) { - return null; - } - - propertySource = new MapPropertySource(propertySource.getName(), propertiesRes); - - T target = clazz.newInstance(); - - MutablePropertySources propertySources = new MutablePropertySources(); - propertySources.addFirst(propertySource); - - PropertiesConfigurationFactory factory = new PropertiesConfigurationFactory<>(target); - factory.setPropertySources(propertySources); - factory.setIgnoreInvalidFields(true); - factory.setIgnoreUnknownFields(true); - - factory.bindPropertiesToTarget(); - - return target; - } catch (Exception e) { - throw new RuntimeException(e); - } - } -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/DefaultPropertyNamePatternsMatcher.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/DefaultPropertyNamePatternsMatcher.java deleted file mode 100644 index 289eda972d..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/DefaultPropertyNamePatternsMatcher.java +++ /dev/null @@ -1,97 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; - -/** - * {@link PropertyNamePatternsMatcher} that matches when a property name exactly - * matches one of the given names, or starts with one of the given names - * followed by a delimiter. This implementation is optimized for frequent calls. 
- * - * @author Phillip Webb - * @since 1.2.0 - */ -class DefaultPropertyNamePatternsMatcher implements PropertyNamePatternsMatcher { - - private final char[] delimiters; - - private final boolean ignoreCase; - - private final String[] names; - - protected DefaultPropertyNamePatternsMatcher(char[] delimiters, String... names){ - this(delimiters, false, names); - } - - protected DefaultPropertyNamePatternsMatcher(char[] delimiters, boolean ignoreCase, String... names){ - this(delimiters, ignoreCase, new HashSet<>(Arrays.asList(names))); - } - - DefaultPropertyNamePatternsMatcher(char[] delimiters, boolean ignoreCase, Set names){ - this.delimiters = delimiters; - this.ignoreCase = ignoreCase; - this.names = names.toArray(new String[names.size()]); - } - - @Override - public boolean matches(String propertyName) { - char[] propertyNameChars = propertyName.toCharArray(); - boolean[] match = new boolean[this.names.length]; - boolean noneMatched = true; - for (int i = 0; i < this.names.length; i++) { - if (this.names[i].length() <= propertyNameChars.length) { - match[i] = true; - noneMatched = false; - } - } - if (noneMatched) { - return false; - } - for (int charIndex = 0; charIndex < propertyNameChars.length; charIndex++) { - for (int nameIndex = 0; nameIndex < this.names.length; nameIndex++) { - if (match[nameIndex]) { - match[nameIndex] = false; - if (charIndex < this.names[nameIndex].length()) { - if (isCharMatch(this.names[nameIndex].charAt(charIndex), propertyNameChars[charIndex])) { - match[nameIndex] = true; - noneMatched = false; - } - } else { - char charAfter = propertyNameChars[this.names[nameIndex].length()]; - if (isDelimiter(charAfter)) { - match[nameIndex] = true; - noneMatched = false; - } - } - } - } - if (noneMatched) { - return false; - } - } - for (boolean b : match) { - if (b) { - return true; - } - } - return false; - } - - private boolean isCharMatch(char c1, char c2) { - if (this.ignoreCase) { - return Character.toLowerCase(c1) == 
Character.toLowerCase(c2); - } - return c1 == c2; - } - - private boolean isDelimiter(char c) { - for (char delimiter : this.delimiters) { - if (c == delimiter) { - return true; - } - } - return false; - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/InetAddressEditor.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/InetAddressEditor.java deleted file mode 100644 index c9b259e3da..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/InetAddressEditor.java +++ /dev/null @@ -1,31 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -import java.beans.PropertyEditorSupport; -import java.net.InetAddress; -import java.net.UnknownHostException; - -/** - * {@link PropertyNamePatternsMatcher} that matches when a property name exactly - * matches one of the given names, or starts with one of the given names - * followed by a delimiter. This implementation is optimized for frequent calls. 
- * - * @author Phillip Webb - * @since 1.2.0 - */ -class InetAddressEditor extends PropertyEditorSupport { - - @Override - public String getAsText() { - return ((InetAddress) getValue()).getHostAddress(); - } - - @Override - public void setAsText(String text) throws IllegalArgumentException { - try { - setValue(InetAddress.getByName(text)); - } catch (UnknownHostException ex) { - throw new IllegalArgumentException("Cannot locate host", ex); - } - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/OriginCapablePropertyValue.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/OriginCapablePropertyValue.java deleted file mode 100644 index b79a03e877..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/OriginCapablePropertyValue.java +++ /dev/null @@ -1,52 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -import org.springframework.beans.PropertyValue; - -import com.alibaba.otter.canal.client.adapter.config.common.PropertySource; - -/** - * A {@link PropertyValue} that can provide information about its origin. 
- * - * @author Andy Wilkinson - */ -class OriginCapablePropertyValue extends PropertyValue { - - private static final String ATTRIBUTE_PROPERTY_ORIGIN = "propertyOrigin"; - - private final PropertyOrigin origin; - - OriginCapablePropertyValue(PropertyValue propertyValue){ - this(propertyValue.getName(), - propertyValue.getValue(), - (PropertyOrigin) propertyValue.getAttribute(ATTRIBUTE_PROPERTY_ORIGIN)); - } - - OriginCapablePropertyValue(String name, Object value, String originName, PropertySource originSource){ - this(name, value, new PropertyOrigin(originSource, originName)); - } - - OriginCapablePropertyValue(String name, Object value, PropertyOrigin origin){ - super(name, value); - this.origin = origin; - setAttribute(ATTRIBUTE_PROPERTY_ORIGIN, origin); - } - - public PropertyOrigin getOrigin() { - return this.origin; - } - - @Override - public String toString() { - String name = (this.origin != null ? this.origin.getName() : this.getName()); - String source = (this.origin.getSource() != null ? 
this.origin.getSource().getName() : "unknown"); - return "'" + name + "' from '" + source + "'"; - } - - public static PropertyOrigin getOrigin(PropertyValue propertyValue) { - if (propertyValue instanceof OriginCapablePropertyValue) { - return ((OriginCapablePropertyValue) propertyValue).getOrigin(); - } - return new OriginCapablePropertyValue(propertyValue).getOrigin(); - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PatternPropertyNamePatternsMatcher.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PatternPropertyNamePatternsMatcher.java deleted file mode 100644 index a041a673d9..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PatternPropertyNamePatternsMatcher.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -import java.util.Collection; - -import org.springframework.util.PatternMatchUtils; - -/** - * {@link PropertyNamePatternsMatcher} that delegates to - * {@link PatternMatchUtils#simpleMatch(String[], String)}. - * - * @author Phillip Webb - * @since 1.2.0 - */ -class PatternPropertyNamePatternsMatcher implements PropertyNamePatternsMatcher { - - private final String[] patterns; - - PatternPropertyNamePatternsMatcher(Collection patterns){ - this.patterns = (patterns != null ? 
patterns.toArray(new String[patterns.size()]) : new String[] {}); - } - - @Override - public boolean matches(String propertyName) { - return PatternMatchUtils.simpleMatch(this.patterns, propertyName); - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertiesConfigurationFactory.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertiesConfigurationFactory.java deleted file mode 100644 index e8162efa68..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertiesConfigurationFactory.java +++ /dev/null @@ -1,356 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -import java.beans.PropertyDescriptor; -import java.util.*; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.beans.BeanUtils; -import org.springframework.beans.PropertyValues; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.beans.support.ResourceEditorRegistrar; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; -import org.springframework.context.MessageSource; -import org.springframework.context.MessageSourceAware; -import org.springframework.core.convert.ConversionService; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; -import org.springframework.validation.*; - -import com.alibaba.otter.canal.client.adapter.config.common.PropertySources; - -/** - * Validate some {@link Properties} (or optionally - * {@link org.springframework.core.env.PropertySources}) by binding them to an - * object of a specified type and then optionally running a {@link Validator} - * over it. 
- * - * @param the target type - * @author Dave Syer - */ -public class PropertiesConfigurationFactory implements FactoryBean, ApplicationContextAware, MessageSourceAware, InitializingBean { - - private static final char[] EXACT_DELIMITERS = { '_', '.', '[' }; - - private static final char[] TARGET_NAME_DELIMITERS = { '_', '.' }; - - private static final Log logger = LogFactory.getLog(PropertiesConfigurationFactory.class); - - private boolean ignoreUnknownFields = true; - - private boolean ignoreInvalidFields; - - private boolean exceptionIfInvalid = true; - - private PropertySources propertySources; - - private final T target; - - private Validator validator; - - private ApplicationContext applicationContext; - - private MessageSource messageSource; - - private boolean hasBeenBound = false; - - private boolean ignoreNestedProperties = false; - - private String targetName; - - private ConversionService conversionService; - - private boolean resolvePlaceholders = true; - - /** - * Create a new {@link PropertiesConfigurationFactory} instance. - * - * @param target the target object to bind too - * @see #PropertiesConfigurationFactory(Class) - */ - public PropertiesConfigurationFactory(T target){ - Assert.notNull(target, "target must not be null"); - this.target = target; - } - - /** - * Create a new {@link PropertiesConfigurationFactory} instance. - * - * @param type the target type - * @see #PropertiesConfigurationFactory(Class) - */ - @SuppressWarnings("unchecked") - public PropertiesConfigurationFactory(Class type){ - Assert.notNull(type, "type must not be null"); - this.target = (T) BeanUtils.instantiate(type); - } - - /** - * Flag to disable binding of nested properties (i.e. those with period - * separators in their paths). Can be useful to disable this if the name prefix - * is empty and you don't want to ignore unknown fields. 
- * - * @param ignoreNestedProperties the flag to set (default false) - */ - public void setIgnoreNestedProperties(boolean ignoreNestedProperties) { - this.ignoreNestedProperties = ignoreNestedProperties; - } - - /** - * Set whether to ignore unknown fields, that is, whether to ignore bind - * parameters that do not have corresponding fields in the target object. - *

- * Default is "true". Turn this off to enforce that all bind parameters must - * have a matching field in the target object. - * - * @param ignoreUnknownFields if unknown fields should be ignored - */ - public void setIgnoreUnknownFields(boolean ignoreUnknownFields) { - this.ignoreUnknownFields = ignoreUnknownFields; - } - - /** - * Set whether to ignore invalid fields, that is, whether to ignore bind - * parameters that have corresponding fields in the target object which are not - * accessible (for example because of null values in the nested path). - *

- * Default is "false". Turn this on to ignore bind parameters for nested objects - * in non-existing parts of the target object graph. - * - * @param ignoreInvalidFields if invalid fields should be ignored - */ - public void setIgnoreInvalidFields(boolean ignoreInvalidFields) { - this.ignoreInvalidFields = ignoreInvalidFields; - } - - /** - * Set the target name. - * - * @param targetName the target name - */ - public void setTargetName(String targetName) { - this.targetName = targetName; - } - - @Override - public void setApplicationContext(ApplicationContext applicationContext) { - this.applicationContext = applicationContext; - } - - /** - * Set the message source. - * - * @param messageSource the message source - */ - @Override - public void setMessageSource(MessageSource messageSource) { - this.messageSource = messageSource; - } - - /** - * Set the property sources. - * - * @param propertySources the property sources - */ - public void setPropertySources(PropertySources propertySources) { - this.propertySources = propertySources; - } - - /** - * Set the conversion service. - * - * @param conversionService the conversion service - */ - public void setConversionService(ConversionService conversionService) { - this.conversionService = conversionService; - } - - /** - * Set the validator. - * - * @param validator the validator - */ - public void setValidator(Validator validator) { - this.validator = validator; - } - - /** - * Set a flag to indicate that an exception should be raised if a Validator is - * available and validation fails. - * - * @param exceptionIfInvalid the flag to set - * @deprecated as of 1.5, do not specify a {@link Validator} if validation - * should not occur - */ - @Deprecated - public void setExceptionIfInvalid(boolean exceptionIfInvalid) { - this.exceptionIfInvalid = exceptionIfInvalid; - } - - /** - * Flag to indicate that placeholders should be replaced during binding. Default - * is true. 
- * - * @param resolvePlaceholders flag value - */ - public void setResolvePlaceholders(boolean resolvePlaceholders) { - this.resolvePlaceholders = resolvePlaceholders; - } - - @Override - public void afterPropertiesSet() throws Exception { - bindPropertiesToTarget(); - } - - @Override - public Class getObjectType() { - if (this.target == null) { - return Object.class; - } - return this.target.getClass(); - } - - @Override - public boolean isSingleton() { - return true; - } - - @Override - public T getObject() throws Exception { - if (!this.hasBeenBound) { - bindPropertiesToTarget(); - } - return this.target; - } - - public void bindPropertiesToTarget() throws BindException { - Assert.state(this.propertySources != null, "PropertySources should not be null"); - try { - if (logger.isTraceEnabled()) { - logger.trace("Property Sources: " + this.propertySources); - - } - this.hasBeenBound = true; - doBindPropertiesToTarget(); - } catch (BindException ex) { - if (this.exceptionIfInvalid) { - throw ex; - } - logger.error("Failed to load Properties validation bean. " + "Your Properties may be invalid.", ex); - } - } - - private void doBindPropertiesToTarget() throws BindException { - RelaxedDataBinder dataBinder = (this.targetName != null ? 
new RelaxedDataBinder(this.target, - this.targetName) : new RelaxedDataBinder(this.target)); - if (this.validator != null && this.validator.supports(dataBinder.getTarget().getClass())) { - dataBinder.setValidator(this.validator); - } - if (this.conversionService != null) { - dataBinder.setConversionService(this.conversionService); - } - dataBinder.setAutoGrowCollectionLimit(Integer.MAX_VALUE); - dataBinder.setIgnoreNestedProperties(this.ignoreNestedProperties); - dataBinder.setIgnoreInvalidFields(this.ignoreInvalidFields); - dataBinder.setIgnoreUnknownFields(this.ignoreUnknownFields); - customizeBinder(dataBinder); - if (this.applicationContext != null) { - ResourceEditorRegistrar resourceEditorRegistrar = new ResourceEditorRegistrar(this.applicationContext, - this.applicationContext.getEnvironment()); - resourceEditorRegistrar.registerCustomEditors(dataBinder); - } - Iterable relaxedTargetNames = getRelaxedTargetNames(); - Set names = getNames(relaxedTargetNames); - PropertyValues propertyValues = getPropertySourcesPropertyValues(names, relaxedTargetNames); - dataBinder.bind(propertyValues); - if (this.validator != null) { - dataBinder.validate(); - } - checkForBindingErrors(dataBinder); - } - - private Iterable getRelaxedTargetNames() { - return (this.target != null - && StringUtils.hasLength(this.targetName) ? new RelaxedNames(this.targetName) : null); - } - - private Set getNames(Iterable prefixes) { - Set names = new LinkedHashSet<>(); - if (this.target != null) { - PropertyDescriptor[] descriptors = BeanUtils.getPropertyDescriptors(this.target.getClass()); - for (PropertyDescriptor descriptor : descriptors) { - String name = descriptor.getName(); - if (!name.equals("class")) { - RelaxedNames relaxedNames = RelaxedNames.forCamelCase(name); - if (prefixes == null) { - for (String relaxedName : relaxedNames) { - names.add(relaxedName); - } - } else { - for (String prefix : prefixes) { - for (String relaxedName : relaxedNames) { - names.add(prefix + "." 
+ relaxedName); - names.add(prefix + "_" + relaxedName); - } - } - } - } - } - } - return names; - } - - private PropertyValues getPropertySourcesPropertyValues(Set names, Iterable relaxedTargetNames) { - PropertyNamePatternsMatcher includes = getPropertyNamePatternsMatcher(names, relaxedTargetNames); - return new PropertySourcesPropertyValues(this.propertySources, names, includes, this.resolvePlaceholders); - } - - private PropertyNamePatternsMatcher getPropertyNamePatternsMatcher(Set names, - Iterable relaxedTargetNames) { - if (this.ignoreUnknownFields && !isMapTarget()) { - // Since unknown fields are ignored we can filter them out early to save - // unnecessary calls to the PropertySource. - return new DefaultPropertyNamePatternsMatcher(EXACT_DELIMITERS, true, names); - } - if (relaxedTargetNames != null) { - // We can filter properties to those starting with the target name, but - // we can't do a complete filter since we need to trigger the - // unknown fields check - Set relaxedNames = new HashSet<>(); - for (String relaxedTargetName : relaxedTargetNames) { - relaxedNames.add(relaxedTargetName); - } - return new DefaultPropertyNamePatternsMatcher(TARGET_NAME_DELIMITERS, true, relaxedNames); - } - // Not ideal, we basically can't filter anything - return PropertyNamePatternsMatcher.ALL; - } - - private boolean isMapTarget() { - return this.target != null && Map.class.isAssignableFrom(this.target.getClass()); - } - - private void checkForBindingErrors(RelaxedDataBinder dataBinder) throws BindException { - BindingResult errors = dataBinder.getBindingResult(); - if (errors.hasErrors()) { - logger.error("Properties configuration failed validation"); - for (ObjectError error : errors.getAllErrors()) { - logger.error(this.messageSource != null ? this.messageSource.getMessage(error, Locale.getDefault()) - + " (" + error + ")" : error); - } - if (this.exceptionIfInvalid) { - throw new BindException(errors); - } - } - } - - /** - * Customize the data binder. 
- * - * @param dataBinder the data binder that will be used to bind and validate - */ - protected void customizeBinder(DataBinder dataBinder) { - } -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertyNamePatternsMatcher.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertyNamePatternsMatcher.java deleted file mode 100644 index 47d8a780cf..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertyNamePatternsMatcher.java +++ /dev/null @@ -1,24 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -/** - * Strategy interface used to check if a property name matches specific - * criteria. - * - * @author Phillip Webb - * @since 1.2.0 - */ -interface PropertyNamePatternsMatcher { - - PropertyNamePatternsMatcher ALL = propertyName -> true; - - PropertyNamePatternsMatcher NONE = propertyName -> false; - - /** - * Return {@code true} of the property name matches. - * - * @param propertyName the property name - * @return {@code true} if the property name matches - */ - boolean matches(String propertyName); - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertyOrigin.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertyOrigin.java deleted file mode 100644 index 7ad35f1a63..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertyOrigin.java +++ /dev/null @@ -1,30 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -import com.alibaba.otter.canal.client.adapter.config.common.PropertySource; - -/** - * The origin of a property, specifically its source and its name before any - * prefix was removed. 
- * - * @author Andy Wilkinson - * @since 1.3.0 - */ -public class PropertyOrigin { - - private final PropertySource source; - - private final String name; - - PropertyOrigin(PropertySource source, String name){ - this.name = name; - this.source = source; - } - - public PropertySource getSource() { - return this.source; - } - - public String getName() { - return this.name; - } -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertySourcesPropertyResolver.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertySourcesPropertyResolver.java deleted file mode 100644 index 520e857faa..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertySourcesPropertyResolver.java +++ /dev/null @@ -1,164 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -import org.springframework.core.convert.ConversionException; -import org.springframework.core.env.AbstractEnvironment; -import org.springframework.core.env.AbstractPropertyResolver; -import org.springframework.core.env.PropertyResolver; -import org.springframework.util.ClassUtils; - -import com.alibaba.otter.canal.client.adapter.config.common.PropertySource; -import com.alibaba.otter.canal.client.adapter.config.common.PropertySources; - -/** - * {@link PropertyResolver} implementation that resolves property values against - * an underlying set of {@link PropertySources}. - * - * @author Chris Beams - * @author Juergen Hoeller - * @see PropertySource - * @see PropertySources - * @see AbstractEnvironment - * @since 3.1 - */ -public class PropertySourcesPropertyResolver extends AbstractPropertyResolver { - - private final PropertySources propertySources; - - /** - * Create a new resolver against the given property sources. 
- * - * @param propertySources the set of {@link PropertySource} objects to use - */ - public PropertySourcesPropertyResolver(PropertySources propertySources){ - this.propertySources = propertySources; - } - - @Override - public boolean containsProperty(String key) { - if (this.propertySources != null) { - for (PropertySource propertySource : this.propertySources) { - if (propertySource.containsProperty(key)) { - return true; - } - } - } - return false; - } - - @Override - public String getProperty(String key) { - return getProperty(key, String.class, true); - } - - @Override - public T getProperty(String key, Class targetValueType) { - return getProperty(key, targetValueType, true); - } - - @Override - protected String getPropertyAsRawString(String key) { - return getProperty(key, String.class, false); - } - - protected T getProperty(String key, Class targetValueType, boolean resolveNestedPlaceholders) { - if (this.propertySources != null) { - for (PropertySource propertySource : this.propertySources) { - if (logger.isTraceEnabled()) { - logger - .trace("Searching for key '" + key + "' in PropertySource '" + propertySource.getName() + "'"); - } - Object value = propertySource.getProperty(key); - if (value != null) { - if (resolveNestedPlaceholders && value instanceof String) { - value = resolveNestedPlaceholders((String) value); - } - logKeyFound(key, propertySource, value); - return convertValueIfNecessary(value, targetValueType); - } - } - } - if (logger.isDebugEnabled()) { - logger.debug("Could not find key '" + key + "' in any property source"); - } - return null; - } - - @Deprecated - public Class getPropertyAsClass(String key, Class targetValueType) { - if (this.propertySources != null) { - for (PropertySource propertySource : this.propertySources) { - if (logger.isTraceEnabled()) { - logger.trace(String.format("Searching for key '%s' in [%s]", key, propertySource.getName())); - } - Object value = propertySource.getProperty(key); - if (value != null) { - 
logKeyFound(key, propertySource, value); - Class clazz; - if (value instanceof String) { - try { - clazz = ClassUtils.forName((String) value, null); - } catch (Exception ex) { - throw new PropertySourcesPropertyResolver.ClassConversionException((String) value, - targetValueType, - ex); - } - } else if (value instanceof Class) { - clazz = (Class) value; - } else { - clazz = value.getClass(); - } - if (!targetValueType.isAssignableFrom(clazz)) { - throw new PropertySourcesPropertyResolver.ClassConversionException(clazz, targetValueType); - } - @SuppressWarnings("unchecked") - Class targetClass = (Class) clazz; - return targetClass; - } - } - } - if (logger.isDebugEnabled()) { - logger.debug(String.format("Could not find key '%s' in any property source", key)); - } - return null; - } - - /** - * Log the given key as found in the given {@link PropertySource}, resulting in - * the given value. - *

- * The default implementation writes a debug log message with key and source. As - * of 4.3.3, this does not log the value anymore in order to avoid accidental - * logging of sensitive settings. Subclasses may override this method to change - * the log level and/or log message, including the property's value if desired. - * - * @param key the key found - * @param propertySource the {@code PropertySource} that the key has been found - * in - * @param value the corresponding value - * @since 4.3.1 - */ - protected void logKeyFound(String key, PropertySource propertySource, Object value) { - if (logger.isDebugEnabled()) { - logger.debug("Found key '" + key + "' in PropertySource '" + propertySource.getName() - + "' with value of type " + value.getClass().getSimpleName()); - } - } - - @SuppressWarnings("serial") - @Deprecated - private static class ClassConversionException extends ConversionException { - - public ClassConversionException(Class actual, Class expected){ - super(String - .format("Actual type %s is not assignable to expected type %s", actual.getName(), expected.getName())); - } - - public ClassConversionException(String actual, Class expected, Exception ex){ - super( - String - .format("Could not find/load class %s during attempt to convert to %s", actual, expected.getName()), - ex); - } - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertySourcesPropertyValues.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertySourcesPropertyValues.java deleted file mode 100644 index 70ad5731af..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/PropertySourcesPropertyValues.java +++ /dev/null @@ -1,233 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -import java.util.Collection; -import java.util.LinkedHashMap; -import java.util.Locale; -import java.util.Map; -import 
java.util.concurrent.ConcurrentHashMap; -import java.util.regex.Pattern; - -import org.springframework.beans.MutablePropertyValues; -import org.springframework.beans.PropertyValue; -import org.springframework.beans.PropertyValues; -import org.springframework.util.Assert; -import org.springframework.validation.DataBinder; - -import com.alibaba.otter.canal.client.adapter.config.common.CompositePropertySource; -import com.alibaba.otter.canal.client.adapter.config.common.EnumerablePropertySource; -import com.alibaba.otter.canal.client.adapter.config.common.PropertySource; -import com.alibaba.otter.canal.client.adapter.config.common.PropertySources; - -/** - * A {@link PropertyValues} implementation backed by a {@link PropertySources}, - * bridging the two abstractions and allowing (for instance) a regular - * {@link DataBinder} to be used with the latter. - * - * @author Dave Syer - * @author Phillip Webb - */ -public class PropertySourcesPropertyValues implements PropertyValues { - - private static final Pattern COLLECTION_PROPERTY = Pattern - .compile("\\[(\\d+)\\](\\.\\S+)?"); - - private final PropertySources propertySources; - - private final Collection nonEnumerableFallbackNames; - - private final PropertyNamePatternsMatcher includes; - - private final Map propertyValues = new LinkedHashMap<>(); - - private final ConcurrentHashMap> collectionOwners = new ConcurrentHashMap<>(); - - private final boolean resolvePlaceholders; - - /** - * Create a new PropertyValues from the given PropertySources. - * - * @param propertySources a PropertySources instance - */ - public PropertySourcesPropertyValues(PropertySources propertySources){ - this(propertySources, true); - } - - /** - * Create a new PropertyValues from the given PropertySources that will - * optionally resolve placeholders. - * - * @param propertySources a PropertySources instance - * @param resolvePlaceholders {@code true} if placeholders should be resolved. 
- * @since 1.5.2 - */ - public PropertySourcesPropertyValues(PropertySources propertySources, boolean resolvePlaceholders){ - this(propertySources, (Collection) null, PropertyNamePatternsMatcher.ALL, resolvePlaceholders); - } - - /** - * Create a new PropertyValues from the given PropertySources. - * - * @param propertySources a PropertySources instance - * @param includePatterns property name patterns to include from system - * properties and environment variables - * @param nonEnumerableFallbackNames the property names to try in lieu of an - * {@link EnumerablePropertySource}. - */ - public PropertySourcesPropertyValues(PropertySources propertySources, Collection includePatterns, - Collection nonEnumerableFallbackNames){ - this(propertySources, - nonEnumerableFallbackNames, - new PatternPropertyNamePatternsMatcher(includePatterns), - true); - } - - /** - * Create a new PropertyValues from the given PropertySources. - * - * @param propertySources a PropertySources instance - * @param nonEnumerableFallbackNames the property names to try in lieu of an - * {@link EnumerablePropertySource}. 
- * @param includes the property name patterns to include - * @param resolvePlaceholders flag to indicate the placeholders should be - * resolved - */ - PropertySourcesPropertyValues(PropertySources propertySources, Collection nonEnumerableFallbackNames, - PropertyNamePatternsMatcher includes, boolean resolvePlaceholders){ - Assert.notNull(propertySources, "PropertySources must not be null"); - Assert.notNull(includes, "Includes must not be null"); - this.propertySources = propertySources; - this.nonEnumerableFallbackNames = nonEnumerableFallbackNames; - this.includes = includes; - this.resolvePlaceholders = resolvePlaceholders; - PropertySourcesPropertyResolver resolver = new PropertySourcesPropertyResolver(propertySources); - for (PropertySource source : propertySources) { - processPropertySource(source, resolver); - } - } - - private void processPropertySource(PropertySource source, PropertySourcesPropertyResolver resolver) { - if (source instanceof CompositePropertySource) { - processCompositePropertySource((CompositePropertySource) source, resolver); - } else if (source instanceof EnumerablePropertySource) { - processEnumerablePropertySource((EnumerablePropertySource) source, resolver, this.includes); - } else { - processNonEnumerablePropertySource(source, resolver); - } - } - - private void processCompositePropertySource(CompositePropertySource source, - PropertySourcesPropertyResolver resolver) { - for (PropertySource nested : source.getPropertySources()) { - processPropertySource(nested, resolver); - } - } - - private void processEnumerablePropertySource(EnumerablePropertySource source, - PropertySourcesPropertyResolver resolver, - PropertyNamePatternsMatcher includes) { - if (source.getPropertyNames().length > 0) { - for (String propertyName : source.getPropertyNames()) { - if (includes.matches(propertyName)) { - Object value = getEnumerableProperty(source, resolver, propertyName); - putIfAbsent(propertyName, value, source); - } - } - } - } - - private 
Object getEnumerableProperty(EnumerablePropertySource source, PropertySourcesPropertyResolver resolver, - String propertyName) { - try { - if (this.resolvePlaceholders) { - return resolver.getProperty(propertyName, Object.class); - } - } catch (RuntimeException ex) { - // Probably could not resolve placeholders, ignore it here - } - return source.getProperty(propertyName); - } - - private void processNonEnumerablePropertySource(PropertySource source, - PropertySourcesPropertyResolver resolver) { - // We can only do exact matches for non-enumerable property names, but - // that's better than nothing... - if (this.nonEnumerableFallbackNames == null) { - return; - } - for (String propertyName : this.nonEnumerableFallbackNames) { - if (!source.containsProperty(propertyName)) { - continue; - } - Object value = null; - try { - value = resolver.getProperty(propertyName, Object.class); - } catch (RuntimeException ex) { - // Probably could not convert to Object, weird, but ignorable - } - if (value == null) { - value = source.getProperty(propertyName.toUpperCase(Locale.ENGLISH)); - } - putIfAbsent(propertyName, value, source); - } - } - - @Override - public PropertyValue[] getPropertyValues() { - Collection values = this.propertyValues.values(); - return values.toArray(new PropertyValue[values.size()]); - } - - @Override - public PropertyValue getPropertyValue(String propertyName) { - PropertyValue propertyValue = this.propertyValues.get(propertyName); - if (propertyValue != null) { - return propertyValue; - } - for (PropertySource source : this.propertySources) { - Object value = source.getProperty(propertyName); - propertyValue = putIfAbsent(propertyName, value, source); - if (propertyValue != null) { - return propertyValue; - } - } - return null; - } - - private PropertyValue putIfAbsent(String propertyName, Object value, PropertySource source) { - if (value != null && !this.propertyValues.containsKey(propertyName)) { - PropertySource collectionOwner = 
this.collectionOwners - .putIfAbsent(COLLECTION_PROPERTY.matcher(propertyName).replaceAll("[]"), source); - if (collectionOwner == null || collectionOwner == source) { - PropertyValue propertyValue = new OriginCapablePropertyValue(propertyName, value, propertyName, source); - this.propertyValues.put(propertyName, propertyValue); - return propertyValue; - } - } - return null; - } - - @Override - public PropertyValues changesSince(PropertyValues old) { - MutablePropertyValues changes = new MutablePropertyValues(); - // for each property value in the new set - for (PropertyValue newValue : getPropertyValues()) { - // if there wasn't an old one, add it - PropertyValue oldValue = old.getPropertyValue(newValue.getName()); - if (oldValue == null || !oldValue.equals(newValue)) { - changes.addPropertyValue(newValue); - } - } - return changes; - } - - @Override - public boolean contains(String propertyName) { - return getPropertyValue(propertyName) != null; - } - - @Override - public boolean isEmpty() { - return this.propertyValues.isEmpty(); - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedConversionService.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedConversionService.java deleted file mode 100644 index ab8e550673..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedConversionService.java +++ /dev/null @@ -1,127 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -import java.util.EnumSet; -import java.util.Locale; -import java.util.Set; - -import org.springframework.core.convert.ConversionFailedException; -import org.springframework.core.convert.ConversionService; -import org.springframework.core.convert.TypeDescriptor; -import org.springframework.core.convert.converter.Converter; -import org.springframework.core.convert.converter.ConverterFactory; -import 
org.springframework.core.convert.support.DefaultConversionService; -import org.springframework.core.convert.support.GenericConversionService; -import org.springframework.util.Assert; - -/** - * Internal {@link ConversionService} used by {@link RelaxedDataBinder} to - * support additional relaxed conversion. - * - * @author Phillip Webb - * @author Stephane Nicoll - * @since 1.1.0 - */ -class RelaxedConversionService implements ConversionService { - - private final ConversionService conversionService; - - private final GenericConversionService additionalConverters; - - /** - * Create a new {@link RelaxedConversionService} instance. - * - * @param conversionService and option root conversion service - */ - RelaxedConversionService(ConversionService conversionService){ - this.conversionService = conversionService; - this.additionalConverters = new GenericConversionService(); - DefaultConversionService.addCollectionConverters(this.additionalConverters); - this.additionalConverters - .addConverterFactory(new RelaxedConversionService.StringToEnumIgnoringCaseConverterFactory()); - this.additionalConverters.addConverter(new StringToCharArrayConverter()); - } - - @Override - public boolean canConvert(Class sourceType, Class targetType) { - return (this.conversionService != null && this.conversionService.canConvert(sourceType, targetType)) - || this.additionalConverters.canConvert(sourceType, targetType); - } - - @Override - public boolean canConvert(TypeDescriptor sourceType, TypeDescriptor targetType) { - return (this.conversionService != null && this.conversionService.canConvert(sourceType, targetType)) - || this.additionalConverters.canConvert(sourceType, targetType); - } - - @Override - @SuppressWarnings("unchecked") - public T convert(Object source, Class targetType) { - Assert.notNull(targetType, "The targetType to convert to cannot be null"); - return (T) convert(source, TypeDescriptor.forObject(source), TypeDescriptor.valueOf(targetType)); - } - - @Override - public 
Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) { - if (this.conversionService != null) { - try { - return this.conversionService.convert(source, sourceType, targetType); - } catch (ConversionFailedException ex) { - // Ignore and try the additional converters - } - } - return this.additionalConverters.convert(source, sourceType, targetType); - } - - /** - * Clone of Spring's package private StringToEnumConverterFactory, but ignoring - * the case of the source. - */ - @SuppressWarnings({ "unchecked", "rawtypes" }) - private static class StringToEnumIgnoringCaseConverterFactory implements ConverterFactory { - - @Override - public Converter getConverter(Class targetType) { - Class enumType = targetType; - while (enumType != null && !enumType.isEnum()) { - enumType = enumType.getSuperclass(); - } - Assert.notNull(enumType, "The target type " + targetType.getName() + " does not refer to an enum"); - return new RelaxedConversionService.StringToEnumIgnoringCaseConverterFactory.StringToEnum(enumType); - } - - private static class StringToEnum implements Converter { - - private final Class enumType; - - StringToEnum(Class enumType){ - this.enumType = enumType; - } - - @Override - public T convert(String source) { - if (source.isEmpty()) { - // It's an empty enum identifier: reset the enum value to null. - return null; - } - source = source.trim(); - for (T candidate : (Set) EnumSet.allOf(this.enumType)) { - RelaxedNames names = new RelaxedNames( - candidate.name().replace('_', '-').toLowerCase(Locale.ENGLISH)); - for (String name : names) { - if (name.equals(source)) { - return candidate; - } - } - if (candidate.name().equalsIgnoreCase(source)) { - return candidate; - } - } - throw new IllegalArgumentException( - "No enum constant " + this.enumType.getCanonicalName() + "." 
+ source); - } - - } - - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedDataBinder.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedDataBinder.java deleted file mode 100644 index 7723eb0f16..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedDataBinder.java +++ /dev/null @@ -1,729 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -import java.beans.PropertyEditor; -import java.net.InetAddress; -import java.util.*; - -import org.springframework.beans.*; -import org.springframework.beans.propertyeditors.FileEditor; -import org.springframework.core.convert.ConversionService; -import org.springframework.core.convert.TypeDescriptor; -import org.springframework.core.env.StandardEnvironment; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; -import org.springframework.util.StringUtils; -import org.springframework.validation.AbstractPropertyBindingResult; -import org.springframework.validation.BeanPropertyBindingResult; -import org.springframework.validation.DataBinder; - -/** - * Binder implementation that allows caller to bind to maps and also allows - * property names to match a bit loosely (if underscores or dashes are removed - * and replaced with camel case for example). 
- * - * @author Dave Syer - * @author Phillip Webb - * @author Stephane Nicoll - * @author Andy Wilkinson - * @see RelaxedNames - */ -public class RelaxedDataBinder extends DataBinder { - - private static final Set> EXCLUDED_EDITORS; - - static { - Set> excluded = new HashSet<>(); - excluded.add(FileEditor.class); - EXCLUDED_EDITORS = Collections.unmodifiableSet(excluded); - } - - private static final Object BLANK = new Object(); - - private String namePrefix; - - private boolean ignoreNestedProperties; - - private MultiValueMap nameAliases = new LinkedMultiValueMap<>(); - - /** - * Create a new {@link RelaxedDataBinder} instance. - * - * @param target the target into which properties are bound - */ - public RelaxedDataBinder(Object target){ - super(wrapTarget(target)); - } - - /** - * Create a new {@link RelaxedDataBinder} instance. - * - * @param target the target into which properties are bound - * @param namePrefix An optional prefix to be used when reading properties - */ - public RelaxedDataBinder(Object target, String namePrefix){ - super(wrapTarget(target), (StringUtils.hasLength(namePrefix) ? namePrefix : DEFAULT_OBJECT_NAME)); - this.namePrefix = cleanNamePrefix(namePrefix); - } - - private String cleanNamePrefix(String namePrefix) { - if (!StringUtils.hasLength(namePrefix)) { - return null; - } - return (namePrefix.endsWith(".") ? namePrefix : namePrefix + "."); - } - - /** - * Flag to disable binding of nested properties (i.e. those with period - * separators in their paths). Can be useful to disable this if the name prefix - * is empty and you don't want to ignore unknown fields. - * - * @param ignoreNestedProperties the flag to set (default false) - */ - public void setIgnoreNestedProperties(boolean ignoreNestedProperties) { - this.ignoreNestedProperties = ignoreNestedProperties; - } - - /** - * Set name aliases. 
- * - * @param aliases a map of property name to aliases - */ - public void setNameAliases(Map> aliases) { - this.nameAliases = new LinkedMultiValueMap<>(aliases); - } - - /** - * Add aliases to the {@link DataBinder}. - * - * @param name the property name to alias - * @param alias aliases for the property names - * @return this instance - */ - public RelaxedDataBinder withAlias(String name, String... alias) { - for (String value : alias) { - this.nameAliases.add(name, value); - } - return this; - } - - @Override - protected void doBind(MutablePropertyValues propertyValues) { - super.doBind(modifyProperties(propertyValues, getTarget())); - } - - /** - * Modify the property values so that period separated property paths are valid - * for map keys. Also creates new maps for properties of map type that are null - * (assuming all maps are potentially nested). The standard bracket {@code[...]} - * dereferencing is also accepted. - * - * @param propertyValues the property values - * @param target the target object - * @return modified property values - */ - private MutablePropertyValues modifyProperties(MutablePropertyValues propertyValues, Object target) { - propertyValues = getPropertyValuesForNamePrefix(propertyValues); - if (target instanceof RelaxedDataBinder.MapHolder) { - propertyValues = addMapPrefix(propertyValues); - } - BeanWrapper wrapper = new BeanWrapperImpl(target); - wrapper.setConversionService(new RelaxedConversionService(getConversionService())); - wrapper.setAutoGrowNestedPaths(true); - List sortedValues = new ArrayList<>(); - Set modifiedNames = new HashSet<>(); - List sortedNames = getSortedPropertyNames(propertyValues); - for (String name : sortedNames) { - PropertyValue propertyValue = propertyValues.getPropertyValue(name); - PropertyValue modifiedProperty = modifyProperty(wrapper, propertyValue); - if (modifiedNames.add(modifiedProperty.getName())) { - sortedValues.add(modifiedProperty); - } - } - return new MutablePropertyValues(sortedValues); - 
} - - private List getSortedPropertyNames(MutablePropertyValues propertyValues) { - List names = new LinkedList<>(); - for (PropertyValue propertyValue : propertyValues.getPropertyValueList()) { - names.add(propertyValue.getName()); - } - sortPropertyNames(names); - return names; - } - - /** - * Sort by name so that parent properties get processed first (e.g. 'foo.bar' - * before 'foo.bar.spam'). Don't use Collections.sort() because the order might - * be significant for other property names (it shouldn't be but who knows what - * people might be relying on, e.g. HSQL has a JDBCXADataSource where - * "databaseName" is a synonym for "url"). - * - * @param names the names to sort - */ - private void sortPropertyNames(List names) { - for (String name : new ArrayList<>(names)) { - int propertyIndex = names.indexOf(name); - RelaxedDataBinder.BeanPath path = new RelaxedDataBinder.BeanPath(name); - for (String prefix : path.prefixes()) { - int prefixIndex = names.indexOf(prefix); - if (prefixIndex >= propertyIndex) { - // The child property has a parent in the list in the wrong order - names.remove(name); - names.add(prefixIndex, name); - } - } - } - } - - private MutablePropertyValues addMapPrefix(MutablePropertyValues propertyValues) { - MutablePropertyValues rtn = new MutablePropertyValues(); - for (PropertyValue pv : propertyValues.getPropertyValues()) { - rtn.add("map." + pv.getName(), pv.getValue()); - } - return rtn; - } - - private MutablePropertyValues getPropertyValuesForNamePrefix(MutablePropertyValues propertyValues) { - if (!StringUtils.hasText(this.namePrefix) && !this.ignoreNestedProperties) { - return propertyValues; - } - MutablePropertyValues rtn = new MutablePropertyValues(); - for (PropertyValue value : propertyValues.getPropertyValues()) { - String name = value.getName(); - for (String prefix : new RelaxedNames(stripLastDot(this.namePrefix))) { - for (String separator : new String[] { ".", "_" }) { - String candidate = (StringUtils.hasLength(prefix) ? 
prefix + separator : prefix); - if (name.startsWith(candidate)) { - name = name.substring(candidate.length()); - if (!(this.ignoreNestedProperties && name.contains("."))) { - PropertyOrigin propertyOrigin = OriginCapablePropertyValue.getOrigin(value); - rtn.addPropertyValue( - new OriginCapablePropertyValue(name, value.getValue(), propertyOrigin)); - } - } - } - } - } - return rtn; - } - - private String stripLastDot(String string) { - if (StringUtils.hasLength(string) && string.endsWith(".")) { - string = string.substring(0, string.length() - 1); - } - return string; - } - - private PropertyValue modifyProperty(BeanWrapper target, PropertyValue propertyValue) { - String name = propertyValue.getName(); - String normalizedName = normalizePath(target, name); - if (!normalizedName.equals(name)) { - return new PropertyValue(normalizedName, propertyValue.getValue()); - } - return propertyValue; - } - - /** - * Normalize a bean property path to a format understood by a BeanWrapper. This - * is used so that - *

    - *
  • Fuzzy matching can be employed for bean property names
  • - *
  • Period separators can be used instead of indexing ([...]) for map - * keys
  • - *
- * - * @param wrapper a bean wrapper for the object to bind - * @param path the bean path to bind - * @return a transformed path with correct bean wrapper syntax - */ - protected String normalizePath(BeanWrapper wrapper, String path) { - return initializePath(wrapper, new RelaxedDataBinder.BeanPath(path), 0); - } - - @Override - protected AbstractPropertyBindingResult createBeanPropertyBindingResult() { - return new RelaxedDataBinder.RelaxedBeanPropertyBindingResult(getTarget(), - getObjectName(), - isAutoGrowNestedPaths(), - getAutoGrowCollectionLimit(), - getConversionService()); - } - - private String initializePath(BeanWrapper wrapper, RelaxedDataBinder.BeanPath path, int index) { - String prefix = path.prefix(index); - String key = path.name(index); - if (path.isProperty(index)) { - key = getActualPropertyName(wrapper, prefix, key); - path.rename(index, key); - } - if (path.name(++index) == null) { - return path.toString(); - } - String name = path.prefix(index); - TypeDescriptor descriptor = wrapper.getPropertyTypeDescriptor(name); - if (descriptor == null || descriptor.isMap()) { - if (isMapValueStringType(descriptor) || isBlanked(wrapper, name, path.name(index))) { - path.collapseKeys(index); - } - path.mapIndex(index); - extendMapIfNecessary(wrapper, path, index); - } else if (descriptor.isCollection()) { - extendCollectionIfNecessary(wrapper, path, index); - } else if (descriptor.getType().equals(Object.class)) { - if (isBlanked(wrapper, name, path.name(index))) { - path.collapseKeys(index); - } - path.mapIndex(index); - if (path.isLastNode(index)) { - wrapper.setPropertyValue(path.toString(), BLANK); - } else { - String next = path.prefix(index + 1); - if (wrapper.getPropertyValue(next) == null) { - wrapper.setPropertyValue(next, new LinkedHashMap()); - } - } - } - return initializePath(wrapper, path, index); - } - - private boolean isMapValueStringType(TypeDescriptor descriptor) { - if (descriptor == null || descriptor.getMapValueTypeDescriptor() == 
null) { - return false; - } - if (Properties.class.isAssignableFrom(descriptor.getObjectType())) { - // Properties is declared as Map but we know it's really - // Map - return true; - } - Class valueType = descriptor.getMapValueTypeDescriptor().getObjectType(); - return (valueType != null && CharSequence.class.isAssignableFrom(valueType)); - } - - @SuppressWarnings("rawtypes") - private boolean isBlanked(BeanWrapper wrapper, String propertyName, String key) { - Object value = (wrapper.isReadableProperty(propertyName) ? wrapper.getPropertyValue(propertyName) : null); - if (value instanceof Map) { - if (((Map) value).get(key) == BLANK) { - return true; - } - } - return false; - } - - private void extendCollectionIfNecessary(BeanWrapper wrapper, RelaxedDataBinder.BeanPath path, int index) { - String name = path.prefix(index); - TypeDescriptor elementDescriptor = wrapper.getPropertyTypeDescriptor(name).getElementTypeDescriptor(); - if (!elementDescriptor.isMap() && !elementDescriptor.isCollection() - && !elementDescriptor.getType().equals(Object.class)) { - return; - } - Object extend = new LinkedHashMap(); - if (!elementDescriptor.isMap() && path.isArrayIndex(index)) { - extend = new ArrayList<>(); - } - wrapper.setPropertyValue(path.prefix(index + 1), extend); - } - - private void extendMapIfNecessary(BeanWrapper wrapper, RelaxedDataBinder.BeanPath path, int index) { - String name = path.prefix(index); - TypeDescriptor parent = wrapper.getPropertyTypeDescriptor(name); - if (parent == null) { - return; - } - TypeDescriptor descriptor = parent.getMapValueTypeDescriptor(); - if (descriptor == null) { - descriptor = TypeDescriptor.valueOf(Object.class); - } - if (!descriptor.isMap() && !descriptor.isCollection() && !descriptor.getType().equals(Object.class)) { - return; - } - String extensionName = path.prefix(index + 1); - if (wrapper.isReadableProperty(extensionName)) { - Object currentValue = wrapper.getPropertyValue(extensionName); - if ((descriptor.isCollection() && 
currentValue instanceof Collection) - || (!descriptor.isCollection() && currentValue instanceof Map)) { - return; - } - } - Object extend = new LinkedHashMap(); - if (descriptor.isCollection()) { - extend = new ArrayList<>(); - } - if (descriptor.getType().equals(Object.class) && path.isLastNode(index)) { - extend = BLANK; - } - wrapper.setPropertyValue(extensionName, extend); - } - - private String getActualPropertyName(BeanWrapper target, String prefix, String name) { - String propertyName = resolvePropertyName(target, prefix, name); - if (propertyName == null) { - propertyName = resolveNestedPropertyName(target, prefix, name); - } - return (propertyName != null ? propertyName : name); - } - - private String resolveNestedPropertyName(BeanWrapper target, String prefix, String name) { - StringBuilder candidate = new StringBuilder(); - for (String field : name.split("[_\\-\\.]")) { - candidate.append(candidate.length() > 0 ? "." : ""); - candidate.append(field); - String nested = resolvePropertyName(target, prefix, candidate.toString()); - if (nested != null) { - Class type = target.getPropertyType(nested); - if ((type != null) && Map.class.isAssignableFrom(type)) { - // Special case for map property (gh-3836). 
- return nested + "[" + name.substring(candidate.length() + 1) + "]"; - } - String propertyName = resolvePropertyName(target, - joinString(prefix, nested), - name.substring(candidate.length() + 1)); - if (propertyName != null) { - return joinString(nested, propertyName); - } - } - } - return null; - } - - private String resolvePropertyName(BeanWrapper target, String prefix, String name) { - Iterable names = getNameAndAliases(name); - for (String nameOrAlias : names) { - for (String candidate : new RelaxedNames(nameOrAlias)) { - try { - if (target.getPropertyType(joinString(prefix, candidate)) != null) { - return candidate; - } - } catch (InvalidPropertyException ex) { - // swallow and continue - } - } - } - return null; - } - - private String joinString(String prefix, String name) { - return (StringUtils.hasLength(prefix) ? prefix + "." + name : name); - } - - private Iterable getNameAndAliases(String name) { - List aliases = this.nameAliases.get(name); - if (aliases == null) { - return Collections.singleton(name); - } - List nameAndAliases = new ArrayList<>(aliases.size() + 1); - nameAndAliases.add(name); - nameAndAliases.addAll(aliases); - return nameAndAliases; - } - - private static Object wrapTarget(Object target) { - if (target instanceof Map) { - @SuppressWarnings("unchecked") - Map map = (Map) target; - target = new RelaxedDataBinder.MapHolder(map); - } - return target; - } - - @Override - public void registerCustomEditor(Class requiredType, PropertyEditor propertyEditor) { - if (propertyEditor == null || !EXCLUDED_EDITORS.contains(propertyEditor.getClass())) { - super.registerCustomEditor(requiredType, propertyEditor); - } - } - - @Override - public void registerCustomEditor(Class requiredType, String field, PropertyEditor propertyEditor) { - if (propertyEditor == null || !EXCLUDED_EDITORS.contains(propertyEditor.getClass())) { - super.registerCustomEditor(requiredType, field, propertyEditor); - } - } - - /** - * Holder to allow Map targets to be bound. 
- */ - static class MapHolder { - - private Map map; - - MapHolder(Map map){ - this.map = map; - } - - public void setMap(Map map) { - this.map = map; - } - - public Map getMap() { - return this.map; - } - - } - - /** - * A path though properties of a bean. - */ - private static class BeanPath { - - private List nodes; - - BeanPath(String path){ - this.nodes = splitPath(path); - } - - public List prefixes() { - List prefixes = new ArrayList<>(); - for (int index = 1; index < this.nodes.size(); index++) { - prefixes.add(prefix(index)); - } - return prefixes; - } - - public boolean isLastNode(int index) { - return index >= this.nodes.size() - 1; - } - - private List splitPath(String path) { - List nodes = new ArrayList<>(); - String current = extractIndexedPaths(path, nodes); - for (String name : StringUtils.delimitedListToStringArray(current, ".")) { - if (StringUtils.hasText(name)) { - nodes.add(new RelaxedDataBinder.BeanPath.PropertyNode(name)); - } - } - return nodes; - } - - private String extractIndexedPaths(String path, List nodes) { - int startRef = path.indexOf("["); - String current = path; - while (startRef >= 0) { - if (startRef > 0) { - nodes.addAll(splitPath(current.substring(0, startRef))); - } - int endRef = current.indexOf("]", startRef); - if (endRef > 0) { - String sub = current.substring(startRef + 1, endRef); - if (sub.matches("[0-9]+")) { - nodes.add(new RelaxedDataBinder.BeanPath.ArrayIndexNode(sub)); - } else { - nodes.add(new RelaxedDataBinder.BeanPath.MapIndexNode(sub)); - } - } - current = current.substring(endRef + 1); - startRef = current.indexOf("["); - } - return current; - } - - public void collapseKeys(int index) { - List revised = new ArrayList<>(); - for (int i = 0; i < index; i++) { - revised.add(this.nodes.get(i)); - } - StringBuilder builder = new StringBuilder(); - for (int i = index; i < this.nodes.size(); i++) { - if (i > index) { - builder.append("."); - } - builder.append(this.nodes.get(i).name); - } - revised.add(new 
RelaxedDataBinder.BeanPath.PropertyNode(builder.toString())); - this.nodes = revised; - } - - public void mapIndex(int index) { - RelaxedDataBinder.BeanPath.PathNode node = this.nodes.get(index); - if (node instanceof RelaxedDataBinder.BeanPath.PropertyNode) { - node = ((RelaxedDataBinder.BeanPath.PropertyNode) node).mapIndex(); - } - this.nodes.set(index, node); - } - - public String prefix(int index) { - return range(0, index); - } - - public void rename(int index, String name) { - this.nodes.get(index).name = name; - } - - public String name(int index) { - if (index < this.nodes.size()) { - return this.nodes.get(index).name; - } - return null; - } - - private String range(int start, int end) { - StringBuilder builder = new StringBuilder(); - for (int i = start; i < end; i++) { - RelaxedDataBinder.BeanPath.PathNode node = this.nodes.get(i); - builder.append(node); - } - if (builder.toString().startsWith(("."))) { - builder.replace(0, 1, ""); - } - return builder.toString(); - } - - public boolean isArrayIndex(int index) { - return this.nodes.get(index) instanceof RelaxedDataBinder.BeanPath.ArrayIndexNode; - } - - public boolean isProperty(int index) { - return this.nodes.get(index) instanceof RelaxedDataBinder.BeanPath.PropertyNode; - } - - @Override - public String toString() { - return prefix(this.nodes.size()); - } - - private static class PathNode { - - protected String name; - - PathNode(String name){ - this.name = name; - } - - } - - private static class ArrayIndexNode extends RelaxedDataBinder.BeanPath.PathNode { - - ArrayIndexNode(String name){ - super(name); - } - - @Override - public String toString() { - return "[" + this.name + "]"; - } - - } - - private static class MapIndexNode extends RelaxedDataBinder.BeanPath.PathNode { - - MapIndexNode(String name){ - super(name); - } - - @Override - public String toString() { - return "[" + this.name + "]"; - } - - } - - private static class PropertyNode extends RelaxedDataBinder.BeanPath.PathNode { - - 
PropertyNode(String name){ - super(name); - } - - public RelaxedDataBinder.BeanPath.MapIndexNode mapIndex() { - return new RelaxedDataBinder.BeanPath.MapIndexNode(this.name); - } - - @Override - public String toString() { - return "." + this.name; - } - - } - - } - - /** - * Extended version of {@link BeanPropertyBindingResult} to support relaxed - * binding. - */ - private static class RelaxedBeanPropertyBindingResult extends BeanPropertyBindingResult { - - private RelaxedConversionService conversionService; - - RelaxedBeanPropertyBindingResult(Object target, String objectName, boolean autoGrowNestedPaths, - int autoGrowCollectionLimit, ConversionService conversionService){ - super(target, objectName, autoGrowNestedPaths, autoGrowCollectionLimit); - this.conversionService = new RelaxedConversionService(conversionService); - } - - @Override - protected BeanWrapper createBeanWrapper() { - BeanWrapper beanWrapper = new RelaxedDataBinder.RelaxedBeanWrapper(getTarget()); - beanWrapper.setConversionService(this.conversionService); - beanWrapper.registerCustomEditor(InetAddress.class, new InetAddressEditor()); - return beanWrapper; - } - - } - - /** - * Extended version of {@link BeanWrapperImpl} to support relaxed binding. 
- */ - private static class RelaxedBeanWrapper extends BeanWrapperImpl { - - private static final Set BENIGN_PROPERTY_SOURCE_NAMES; - - static { - Set names = new HashSet<>(); - names.add(StandardEnvironment.SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME); - names.add(StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME); - BENIGN_PROPERTY_SOURCE_NAMES = Collections.unmodifiableSet(names); - } - - RelaxedBeanWrapper(Object target){ - super(target); - } - - @Override - public void setPropertyValue(PropertyValue pv) throws BeansException { - try { - super.setPropertyValue(pv); - } catch (NotWritablePropertyException ex) { - PropertyOrigin origin = OriginCapablePropertyValue.getOrigin(pv); - if (isBenign(origin)) { - logger.debug("Ignoring benign property binding failure", ex); - return; - } - if (origin == null) { - throw ex; - } - throw new RelaxedBindingNotWritablePropertyException(ex, origin); - } - } - - private boolean isBenign(PropertyOrigin origin) { - String name = (origin != null ? 
origin.getSource().getName() : null); - return BENIGN_PROPERTY_SOURCE_NAMES.contains(name); - } - - } - - public static class RelaxedBindingNotWritablePropertyException extends NotWritablePropertyException { - - private final String message; - - private final PropertyOrigin propertyOrigin; - - RelaxedBindingNotWritablePropertyException(NotWritablePropertyException ex, PropertyOrigin propertyOrigin){ - super(ex.getBeanClass(), ex.getPropertyName()); - this.propertyOrigin = propertyOrigin; - this.message = "Failed to bind '" + propertyOrigin.getName() + "' from '" - + propertyOrigin.getSource().getName() + "' to '" + ex.getPropertyName() + "' property on '" - + ex.getBeanClass().getName() + "'"; - } - - @Override - public String getMessage() { - return this.message; - } - - public PropertyOrigin getPropertyOrigin() { - return this.propertyOrigin; - } - - } -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedNames.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedNames.java deleted file mode 100644 index 2c9dd70c37..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/RelaxedNames.java +++ /dev/null @@ -1,241 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.Locale; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.springframework.util.StringUtils; - -/** - * Generates relaxed name variations from a given source. 
- * - * @author Phillip Webb - * @author Dave Syer - * @see RelaxedDataBinder - */ -public final class RelaxedNames implements Iterable { - - private static final Pattern CAMEL_CASE_PATTERN = Pattern.compile("([^A-Z-])([A-Z])"); - - private static final Pattern SEPARATED_TO_CAMEL_CASE_PATTERN = Pattern.compile("[_\\-.]"); - - private final String name; - - private final Set values = new LinkedHashSet<>(); - - /** - * Create a new {@link RelaxedNames} instance. - * - * @param name the source name. For the maximum number of variations specify the - * name using dashed notation (e.g. {@literal my-property-name} - */ - public RelaxedNames(String name){ - this.name = (name != null ? name : ""); - initialize(RelaxedNames.this.name, this.values); - } - - @Override - public Iterator iterator() { - return this.values.iterator(); - } - - private void initialize(String name, Set values) { - if (values.contains(name)) { - return; - } - for (RelaxedNames.Variation variation : RelaxedNames.Variation.values()) { - for (RelaxedNames.Manipulation manipulation : RelaxedNames.Manipulation.values()) { - String result = name; - result = manipulation.apply(result); - result = variation.apply(result); - values.add(result); - initialize(result, values); - } - } - } - - /** - * Name variations. - */ - enum Variation { - - NONE { - - @Override - public String apply(String value) { - return value; - } - - }, - - LOWERCASE { - - @Override - public String apply(String value) { - return (value.isEmpty() ? value : value.toLowerCase(Locale.ENGLISH)); - } - - }, - - UPPERCASE { - - @Override - public String apply(String value) { - return (value.isEmpty() ? value : value.toUpperCase(Locale.ENGLISH)); - } - - }; - - public abstract String apply(String value); - - } - - /** - * Name manipulations. 
- */ - enum Manipulation { - - NONE { - - @Override - public String apply(String value) { - return value; - } - - }, - - HYPHEN_TO_UNDERSCORE { - - @Override - public String apply(String value) { - return (value.indexOf('-') != -1 ? value.replace('-', '_') : value); - } - - }, - - UNDERSCORE_TO_PERIOD { - - @Override - public String apply(String value) { - return (value.indexOf('_') != -1 ? value.replace('_', '.') : value); - } - - }, - - PERIOD_TO_UNDERSCORE { - - @Override - public String apply(String value) { - return (value.indexOf('.') != -1 ? value.replace('.', '_') : value); - } - - }, - - CAMELCASE_TO_UNDERSCORE { - - @Override - public String apply(String value) { - if (value.isEmpty()) { - return value; - } - Matcher matcher = CAMEL_CASE_PATTERN.matcher(value); - if (!matcher.find()) { - return value; - } - matcher = matcher.reset(); - StringBuffer result = new StringBuffer(); - while (matcher.find()) { - matcher.appendReplacement(result, - matcher.group(1) + '_' + StringUtils.uncapitalize(matcher.group(2))); - } - matcher.appendTail(result); - return result.toString(); - } - - }, - - CAMELCASE_TO_HYPHEN { - - @Override - public String apply(String value) { - if (value.isEmpty()) { - return value; - } - Matcher matcher = CAMEL_CASE_PATTERN.matcher(value); - if (!matcher.find()) { - return value; - } - matcher = matcher.reset(); - StringBuffer result = new StringBuffer(); - while (matcher.find()) { - matcher.appendReplacement(result, - matcher.group(1) + '-' + StringUtils.uncapitalize(matcher.group(2))); - } - matcher.appendTail(result); - return result.toString(); - } - - }, - - SEPARATED_TO_CAMELCASE { - - @Override - public String apply(String value) { - return separatedToCamelCase(value, false); - } - - }, - - CASE_INSENSITIVE_SEPARATED_TO_CAMELCASE { - - @Override - public String apply(String value) { - return separatedToCamelCase(value, true); - } - - }; - - private static final char[] SUFFIXES = new char[] { '_', '-', '.' 
}; - - public abstract String apply(String value); - - private static String separatedToCamelCase(String value, boolean caseInsensitive) { - if (value.isEmpty()) { - return value; - } - StringBuilder builder = new StringBuilder(); - for (String field : SEPARATED_TO_CAMEL_CASE_PATTERN.split(value)) { - field = (caseInsensitive ? field.toLowerCase(Locale.ENGLISH) : field); - builder.append(builder.length() != 0 ? StringUtils.capitalize(field) : field); - } - char lastChar = value.charAt(value.length() - 1); - for (char suffix : SUFFIXES) { - if (lastChar == suffix) { - builder.append(suffix); - break; - } - } - return builder.toString(); - } - - } - - /** - * Return a {@link RelaxedNames} for the given source camelCase source name. - * - * @param name the source name in camelCase - * @return the relaxed names - */ - public static RelaxedNames forCamelCase(String name) { - StringBuilder result = new StringBuilder(); - for (char c : name.toCharArray()) { - result.append(Character.isUpperCase(c) && result.length() > 0 - && result.charAt(result.length() - 1) != '-' ? "-" + Character.toLowerCase(c) : c); - } - return new RelaxedNames(result.toString()); - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/StringToCharArrayConverter.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/StringToCharArrayConverter.java deleted file mode 100644 index 00a2d808fa..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/bind/StringToCharArrayConverter.java +++ /dev/null @@ -1,17 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.bind; - -import org.springframework.core.convert.converter.Converter; - -/** - * Converts a String to a Char Array. 
- * - * @author Phillip Webb - */ -class StringToCharArrayConverter implements Converter { - - @Override - public char[] convert(String source) { - return source.toCharArray(); - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/AbstractResource.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/AbstractResource.java deleted file mode 100644 index 9cb73e90ff..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/AbstractResource.java +++ /dev/null @@ -1,203 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; - -import org.springframework.util.Assert; -import org.springframework.util.ResourceUtils; - -/** - * Convenience base class for {@link Resource} implementations, pre-implementing - * typical behavior. - *

- * The "exists" method will check whether a File or InputStream can be opened; - * "isOpen" will always return false; "getURL" and "getFile" throw an exception; - * and "toString" will return the description. - * - * @author Juergen Hoeller - * @since 28.12.2003 - */ -public abstract class AbstractResource implements Resource { - - /** - * This implementation checks whether a File can be opened, falling back to - * whether an InputStream can be opened. This will cover both directories and - * content resources. - */ - @Override - public boolean exists() { - // Try file existence: can we find the file in the file system? - try { - return getFile().exists(); - } catch (IOException ex) { - // Fall back to stream existence: can we open the stream? - try { - InputStream is = getInputStream(); - is.close(); - return true; - } catch (Throwable isEx) { - return false; - } - } - } - - /** - * This implementation always returns {@code true}. - */ - @Override - public boolean isReadable() { - return true; - } - - /** - * This implementation always returns {@code false}. - */ - @Override - public boolean isOpen() { - return false; - } - - /** - * This implementation throws a FileNotFoundException, assuming that the - * resource cannot be resolved to a URL. - */ - @Override - public URL getURL() throws IOException { - throw new FileNotFoundException(getDescription() + " cannot be resolved to URL"); - } - - /** - * This implementation builds a URI based on the URL returned by - * {@link #getURL()}. - */ - @Override - public URI getURI() throws IOException { - URL url = getURL(); - try { - return ResourceUtils.toURI(url); - } catch (URISyntaxException ex) { - throw new RuntimeException("Invalid URI [" + url + "]", ex); - } - } - - /** - * This implementation throws a FileNotFoundException, assuming that the - * resource cannot be resolved to an absolute file path. 
- */ - @Override - public File getFile() throws IOException { - throw new FileNotFoundException(getDescription() + " cannot be resolved to absolute file path"); - } - - /** - * This implementation reads the entire InputStream to calculate the content - * length. Subclasses will almost always be able to provide a more optimal - * version of this, e.g. checking a File length. - * - * @see #getInputStream() - */ - @Override - public long contentLength() throws IOException { - InputStream is = getInputStream(); - Assert.state(is != null, "Resource InputStream must not be null"); - try { - long size = 0; - byte[] buf = new byte[255]; - int read; - while ((read = is.read(buf)) != -1) { - size += read; - } - return size; - } finally { - try { - is.close(); - } catch (IOException ex) { - } - } - } - - /** - * This implementation checks the timestamp of the underlying File, if - * available. - * - * @see #getFileForLastModifiedCheck() - */ - @Override - public long lastModified() throws IOException { - long lastModified = getFileForLastModifiedCheck().lastModified(); - if (lastModified == 0L) { - throw new FileNotFoundException( - getDescription() + " cannot be resolved in the file system for resolving its last-modified timestamp"); - } - return lastModified; - } - - /** - * Determine the File to use for timestamp checking. - *

- * The default implementation delegates to {@link #getFile()}. - * - * @return the File to use for timestamp checking (never {@code null}) - * @throws FileNotFoundException if the resource cannot be resolved as an - * absolute file path, i.e. is not available in a file system - * @throws IOException in case of general resolution/reading failures - */ - protected File getFileForLastModifiedCheck() throws IOException { - return getFile(); - } - - /** - * This implementation throws a FileNotFoundException, assuming that relative - * resources cannot be created for this resource. - */ - @Override - public org.springframework.core.io.Resource createRelative(String relativePath) throws IOException { - throw new FileNotFoundException("Cannot create a relative resource for " + getDescription()); - } - - /** - * This implementation always returns {@code null}, assuming that this resource - * type does not have a filename. - */ - @Override - public String getFilename() { - return null; - } - - /** - * This implementation returns the description of this resource. - * - * @see #getDescription() - */ - @Override - public String toString() { - return getDescription(); - } - - /** - * This implementation compares description strings. - * - * @see #getDescription() - */ - @Override - public boolean equals(Object obj) { - return (obj == this - || (obj instanceof org.springframework.core.io.Resource - && ((org.springframework.core.io.Resource) obj).getDescription().equals(getDescription()))); - } - - /** - * This implementation returns the description's hash code. 
- * - * @see #getDescription() - */ - @Override - public int hashCode() { - return getDescription().hashCode(); - } -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/ByteArrayResource.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/ByteArrayResource.java deleted file mode 100644 index c8ec120b91..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/ByteArrayResource.java +++ /dev/null @@ -1,117 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.Arrays; - -import org.springframework.core.io.InputStreamResource; -import org.springframework.core.io.Resource; -import org.springframework.util.Assert; - -/** - * {@link Resource} implementation for a given byte array. - *

- * Creates a {@link ByteArrayInputStream} for the given byte array. - *

- * Useful for loading content from any given byte array, without having to - * resort to a single-use {@link InputStreamResource}. Particularly useful for - * creating mail attachments from local content, where JavaMail needs to be able - * to read the stream multiple times. - * - * @author Juergen Hoeller - * @author Sam Brannen - * @see ByteArrayInputStream - * @see InputStreamResource - * @since 1.2.3 - */ -public class ByteArrayResource extends AbstractResource { - - private final byte[] byteArray; - - private final String description; - - /** - * Create a new {@code ByteArrayResource}. - * - * @param byteArray the byte array to wrap - */ - public ByteArrayResource(byte[] byteArray){ - this(byteArray, "resource loaded from byte array"); - } - - /** - * Create a new {@code ByteArrayResource} with a description. - * - * @param byteArray the byte array to wrap - * @param description where the byte array comes from - */ - public ByteArrayResource(byte[] byteArray, String description){ - Assert.notNull(byteArray, "Byte array must not be null"); - this.byteArray = byteArray; - this.description = (description != null ? description : ""); - } - - /** - * Return the underlying byte array. - */ - public final byte[] getByteArray() { - return this.byteArray; - } - - /** - * This implementation always returns {@code true}. - */ - @Override - public boolean exists() { - return true; - } - - /** - * This implementation returns the length of the underlying byte array. - */ - @Override - public long contentLength() { - return this.byteArray.length; - } - - /** - * This implementation returns a ByteArrayInputStream for the underlying byte - * array. - * - * @see ByteArrayInputStream - */ - @Override - public InputStream getInputStream() throws IOException { - return new ByteArrayInputStream(this.byteArray); - } - - /** - * This implementation returns a description that includes the passed-in - * {@code description}, if any. 
- */ - @Override - public String getDescription() { - return "Byte array resource [" + this.description + "]"; - } - - /** - * This implementation compares the underlying byte array. - * - * @see Arrays#equals(byte[], byte[]) - */ - @Override - public boolean equals(Object obj) { - return (obj == this || (obj instanceof org.springframework.core.io.ByteArrayResource - && Arrays.equals(((ByteArrayResource) obj).byteArray, this.byteArray))); - } - - /** - * This implementation returns the hash code based on the underlying byte array. - */ - @Override - public int hashCode() { - return (byte[].class.hashCode() * 29 * this.byteArray.length); - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/CompositePropertySource.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/CompositePropertySource.java deleted file mode 100644 index a60962070c..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/CompositePropertySource.java +++ /dev/null @@ -1,107 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import java.util.*; - -import org.springframework.util.StringUtils; - -/** - * Composite {@link PropertySource} implementation that iterates over a set of - * {@link PropertySource} instances. Necessary in cases where multiple property - * sources share the same name, e.g. when multiple values are supplied to - * {@code @PropertySource}. - *

- * As of Spring 4.1.2, this class extends {@link EnumerablePropertySource} - * instead of plain {@link PropertySource}, exposing {@link #getPropertyNames()} - * based on the accumulated property names from all contained sources (as far as - * possible). - * - * @author Chris Beams - * @author Juergen Hoeller - * @author Phillip Webb - * @since 3.1.1 - */ -public class CompositePropertySource extends EnumerablePropertySource { - - private final Set> propertySources = new LinkedHashSet<>(); - - /** - * Create a new {@code CompositePropertySource}. - * - * @param name the name of the property source - */ - public CompositePropertySource(String name){ - super(name); - } - - @Override - public Object getProperty(String name) { - for (PropertySource propertySource : this.propertySources) { - Object candidate = propertySource.getProperty(name); - if (candidate != null) { - return candidate; - } - } - return null; - } - - @Override - public boolean containsProperty(String name) { - for (PropertySource propertySource : this.propertySources) { - if (propertySource.containsProperty(name)) { - return true; - } - } - return false; - } - - @Override - public String[] getPropertyNames() { - Set names = new LinkedHashSet<>(); - for (PropertySource propertySource : this.propertySources) { - if (!(propertySource instanceof EnumerablePropertySource)) { - throw new IllegalStateException( - "Failed to enumerate property names due to non-enumerable property source: " + propertySource); - } - names.addAll(Arrays.asList(((EnumerablePropertySource) propertySource).getPropertyNames())); - } - return StringUtils.toStringArray(names); - } - - /** - * Add the given {@link PropertySource} to the end of the chain. - * - * @param propertySource the PropertySource to add - */ - public void addPropertySource(PropertySource propertySource) { - this.propertySources.add(propertySource); - } - - /** - * Add the given {@link PropertySource} to the start of the chain. 
- * - * @param propertySource the PropertySource to add - * @since 4.1 - */ - public void addFirstPropertySource(PropertySource propertySource) { - List> existing = new ArrayList<>(this.propertySources); - this.propertySources.clear(); - this.propertySources.add(propertySource); - this.propertySources.addAll(existing); - } - - /** - * Return all property sources that this composite source holds. - * - * @since 4.1.1 - */ - public Collection> getPropertySources() { - return this.propertySources; - } - - @Override - public String toString() { - return String - .format("%s [name='%s', propertySources=%s]", getClass().getSimpleName(), this.name, this.propertySources); - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/EnumerablePropertySource.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/EnumerablePropertySource.java deleted file mode 100644 index e5ce86393a..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/EnumerablePropertySource.java +++ /dev/null @@ -1,58 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import org.springframework.util.ObjectUtils; - -/** - * A {@link PropertySource} implementation capable of interrogating its - * underlying source object to enumerate all possible property name/value pairs. - * Exposes the {@link #getPropertyNames()} method to allow callers to introspect - * available properties without having to access the underlying source object. - * This also facilitates a more efficient implementation of - * {@link #containsProperty(String)}, in that it can call - * {@link #getPropertyNames()} and iterate through the returned array rather - * than attempting a call to {@link #getProperty(String)} which may be more - * expensive. Implementations may consider caching the result of - * {@link #getPropertyNames()} to fully exploit this performance opportunity. 
- *

- * Most framework-provided {@code PropertySource} implementations are - * enumerable; a counter-example would be {@code JndiPropertySource} where, due - * to the nature of JNDI it is not possible to determine all possible property - * names at any given time; rather it is only possible to try to access a - * property (via {@link #getProperty(String)}) in order to evaluate whether it - * is present or not. - * - * @author Chris Beams - * @author Juergen Hoeller - * @since 3.1 - */ -public abstract class EnumerablePropertySource extends PropertySource { - - public EnumerablePropertySource(String name, T source){ - super(name, source); - } - - protected EnumerablePropertySource(String name){ - super(name); - } - - /** - * Return whether this {@code PropertySource} contains a property with the given - * name. - *

- * This implementation checks for the presence of the given name within the - * {@link #getPropertyNames()} array. - * - * @param name the name of the property to find - */ - @Override - public boolean containsProperty(String name) { - return ObjectUtils.containsElement(getPropertyNames(), name); - } - - /** - * Return the names of all properties contained by the {@linkplain #getSource() - * source} object (never {@code null}). - */ - public abstract String[] getPropertyNames(); - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/MapPropertySource.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/MapPropertySource.java deleted file mode 100644 index eecc0ee433..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/MapPropertySource.java +++ /dev/null @@ -1,38 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import java.util.Map; - -import org.springframework.core.env.PropertiesPropertySource; -import org.springframework.core.env.PropertySource; -import org.springframework.util.StringUtils; - -/** - * {@link PropertySource} that reads keys and values from a {@code Map} object. 
- * - * @author Chris Beams - * @author Juergen Hoeller - * @since 3.1 - * @see PropertiesPropertySource - */ -public class MapPropertySource extends EnumerablePropertySource> { - - public MapPropertySource(String name, Map source){ - super(name, source); - } - - @Override - public Object getProperty(String name) { - return this.source.get(name); - } - - @Override - public boolean containsProperty(String name) { - return this.source.containsKey(name); - } - - @Override - public String[] getPropertyNames() { - return StringUtils.toStringArray(this.source.keySet()); - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/MutablePropertySources.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/MutablePropertySources.java deleted file mode 100644 index 78de18c0c6..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/MutablePropertySources.java +++ /dev/null @@ -1,221 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import java.util.Iterator; -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.core.env.PropertyResolver; -import org.springframework.core.env.PropertySourcesPropertyResolver; - -/** - * Default implementation of the {@link PropertySources} interface. Allows - * manipulation of contained property sources and provides a constructor for - * copying an existing {@code PropertySources} instance. - *

- * Where precedence is mentioned in methods such as {@link #addFirst} - * and {@link #addLast}, this is with regard to the order in which property - * sources will be searched when resolving a given property with a - * {@link PropertyResolver}. - * - * @author Chris Beams - * @author Juergen Hoeller - * @see PropertySourcesPropertyResolver - * @since 3.1 - */ -public class MutablePropertySources implements PropertySources { - - private final Log logger; - - private final List> propertySourceList = new CopyOnWriteArrayList<>(); - - /** - * Create a new {@link MutablePropertySources} - * object. - */ - public MutablePropertySources(){ - this.logger = LogFactory.getLog(getClass()); - } - - /** - * Create a new {@code MutablePropertySources} from the given propertySources - * object, preserving the original order of contained {@code PropertySource} - * objects. - */ - public MutablePropertySources(PropertySources propertySources){ - this(); - for (PropertySource propertySource : propertySources) { - addLast(propertySource); - } - } - - /** - * Create a new {@link MutablePropertySources} - * object and inherit the given logger, usually from an enclosing - * {@link Environment}. - */ - MutablePropertySources(Log logger){ - this.logger = logger; - } - - @Override - public boolean contains(String name) { - return this.propertySourceList.contains(PropertySource.named(name)); - } - - @Override - public PropertySource get(String name) { - int index = this.propertySourceList.indexOf(PropertySource.named(name)); - return (index != -1 ? this.propertySourceList.get(index) : null); - } - - @Override - public Iterator> iterator() { - return this.propertySourceList.iterator(); - } - - /** - * Add the given property source object with highest precedence. 
- */ - public void addFirst(PropertySource propertySource) { - if (logger.isDebugEnabled()) { - logger.debug("Adding PropertySource '" + propertySource.getName() + "' with highest search precedence"); - } - removeIfPresent(propertySource); - this.propertySourceList.add(0, propertySource); - } - - /** - * Add the given property source object with lowest precedence. - */ - public void addLast(PropertySource propertySource) { - if (logger.isDebugEnabled()) { - logger.debug("Adding PropertySource '" + propertySource.getName() + "' with lowest search precedence"); - } - removeIfPresent(propertySource); - this.propertySourceList.add(propertySource); - } - - /** - * Add the given property source object with precedence immediately higher than - * the named relative property source. - */ - public void addBefore(String relativePropertySourceName, PropertySource propertySource) { - if (logger.isDebugEnabled()) { - logger.debug("Adding PropertySource '" + propertySource.getName() - + "' with search precedence immediately higher than '" + relativePropertySourceName + "'"); - } - assertLegalRelativeAddition(relativePropertySourceName, propertySource); - removeIfPresent(propertySource); - int index = assertPresentAndGetIndex(relativePropertySourceName); - addAtIndex(index, propertySource); - } - - /** - * Add the given property source object with precedence immediately lower than - * the named relative property source. 
- */ - public void addAfter(String relativePropertySourceName, PropertySource propertySource) { - if (logger.isDebugEnabled()) { - logger.debug("Adding PropertySource '" + propertySource.getName() - + "' with search precedence immediately lower than '" + relativePropertySourceName + "'"); - } - assertLegalRelativeAddition(relativePropertySourceName, propertySource); - removeIfPresent(propertySource); - int index = assertPresentAndGetIndex(relativePropertySourceName); - addAtIndex(index + 1, propertySource); - } - - /** - * Return the precedence of the given property source, {@code -1} if not found. - */ - public int precedenceOf(PropertySource propertySource) { - return this.propertySourceList.indexOf(propertySource); - } - - /** - * Remove and return the property source with the given name, {@code null} if - * not found. - * - * @param name the name of the property source to find and remove - */ - public PropertySource remove(String name) { - if (logger.isDebugEnabled()) { - logger.debug("Removing PropertySource '" + name + "'"); - } - int index = this.propertySourceList.indexOf(PropertySource.named(name)); - return (index != -1 ? this.propertySourceList.remove(index) : null); - } - - /** - * Replace the property source with the given name with the given property - * source object. - * - * @param name the name of the property source to find and replace - * @param propertySource the replacement property source - * @throws IllegalArgumentException if no property source with the given name is - * present - * @see #contains - */ - public void replace(String name, PropertySource propertySource) { - if (logger.isDebugEnabled()) { - logger.debug("Replacing PropertySource '" + name + "' with '" + propertySource.getName() + "'"); - } - int index = assertPresentAndGetIndex(name); - this.propertySourceList.set(index, propertySource); - } - - /** - * Return the number of {@link PropertySource} objects contained. 
- */ - public int size() { - return this.propertySourceList.size(); - } - - @Override - public String toString() { - return this.propertySourceList.toString(); - } - - /** - * Ensure that the given property source is not being added relative to itself. - */ - protected void assertLegalRelativeAddition(String relativePropertySourceName, PropertySource propertySource) { - String newPropertySourceName = propertySource.getName(); - if (relativePropertySourceName.equals(newPropertySourceName)) { - throw new IllegalArgumentException( - "PropertySource named '" + newPropertySourceName + "' cannot be added relative to itself"); - } - } - - /** - * Remove the given property source if it is present. - */ - protected void removeIfPresent(PropertySource propertySource) { - this.propertySourceList.remove(propertySource); - } - - /** - * Add the given property source at a particular index in the list. - */ - private void addAtIndex(int index, PropertySource propertySource) { - removeIfPresent(propertySource); - this.propertySourceList.add(index, propertySource); - } - - /** - * Assert that the named property source is present and return its index. 
- * - * @param name {@linkplain PropertySource#getName() name of the property source} - * to find - * @throws IllegalArgumentException if the named property source is not present - */ - private int assertPresentAndGetIndex(String name) { - int index = this.propertySourceList.indexOf(PropertySource.named(name)); - if (index == -1) { - throw new IllegalArgumentException("PropertySource named '" + name + "' does not exist"); - } - return index; - } -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertiesPropertySource.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertiesPropertySource.java deleted file mode 100644 index 12fe686af4..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertiesPropertySource.java +++ /dev/null @@ -1,34 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import java.util.Map; -import java.util.Properties; - -import org.springframework.core.env.PropertySource; - -/** - * {@link PropertySource} implementation that extracts properties from a - * {@link Properties} object. - *

- * Note that because a {@code Properties} object is technically an - * {@code } {@link java.util.Hashtable Hashtable}, one may - * contain non-{@code String} keys or values. This implementation, however is - * restricted to accessing only {@code String}-based keys and values, in the - * same fashion as {@link Properties#getProperty} and - * {@link Properties#setProperty}. - * - * @author Chris Beams - * @author Juergen Hoeller - * @since 3.1 - */ -public class PropertiesPropertySource extends MapPropertySource { - - @SuppressWarnings({ "unchecked", "rawtypes" }) - public PropertiesPropertySource(String name, Properties source){ - super(name, (Map) source); - } - - protected PropertiesPropertySource(String name, Map source){ - super(name, source); - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySource.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySource.java deleted file mode 100644 index f7e4cc3401..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySource.java +++ /dev/null @@ -1,239 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.util.Assert; -import org.springframework.util.ObjectUtils; - -/** - * Abstract base class representing a source of name/value property pairs. The - * underlying {@linkplain #getSource() source object} may be of any type - * {@code T} that encapsulates properties. Examples include - * {@link java.util.Properties} objects, {@link java.util.Map} objects, - * {@code ServletContext} and {@code ServletConfig} objects (for access to init - * parameters). Explore the {@code PropertySource} type hierarchy to see - * provided implementations. - *

- * {@code PropertySource} objects are not typically used in isolation, but - * rather through a {@link PropertySources} object, which aggregates property - * sources and in conjunction with a {@link PropertyResolver} implementation - * that can perform precedence-based searches across the set of - * {@code PropertySources}. - *

- * {@code PropertySource} identity is determined not based on the content of - * encapsulated properties, but rather based on the {@link #getName() name} of - * the {@code PropertySource} alone. This is useful for manipulating - * {@code PropertySource} objects when in collection contexts. See operations in - * {@link MutablePropertySources} as well as the {@link #named(String)} and - * {@link #toString()} methods for details. - *

- * Note that when working - * with @{@link org.springframework.context.annotation.Configuration - * Configuration} classes that the @{@link PropertySource PropertySource} - * annotation provides a convenient and declarative way of adding property - * sources to the enclosing {@code Environment}. - * - * @author Chris Beams - * @since 3.1 - * @see PropertySources - * @see MutablePropertySources - * @see PropertySource - */ -public abstract class PropertySource { - - protected final Log logger = LogFactory.getLog(getClass()); - - protected final String name; - - protected final T source; - - /** - * Create a new {@code PropertySource} with the given name and source object. - */ - public PropertySource(String name, T source){ - Assert.hasText(name, "Property source name must contain at least one character"); - Assert.notNull(source, "Property source must not be null"); - this.name = name; - this.source = source; - } - - /** - * Create a new {@code PropertySource} with the given name and with a new - * {@code Object} instance as the underlying source. - *

- * Often useful in testing scenarios when creating anonymous implementations - * that never query an actual source but rather return hard-coded values. - */ - @SuppressWarnings("unchecked") - public PropertySource(String name){ - this(name, (T) new Object()); - } - - /** - * Return the name of this {@code PropertySource} - */ - public String getName() { - return this.name; - } - - /** - * Return the underlying source object for this {@code PropertySource}. - */ - public T getSource() { - return this.source; - } - - /** - * Return whether this {@code PropertySource} contains the given name. - *

- * This implementation simply checks for a {@code null} return value from - * {@link #getProperty(String)}. Subclasses may wish to implement a more - * efficient algorithm if possible. - * - * @param name the property name to find - */ - public boolean containsProperty(String name) { - return (getProperty(name) != null); - } - - /** - * Return the value associated with the given name, or {@code null} if not - * found. - * - * @param name the property to find - */ - public abstract Object getProperty(String name); - - /** - * This {@code PropertySource} object is equal to the given object if: - *

    - *
  • they are the same instance - *
  • the {@code name} properties for both objects are equal - *
- *

- * No properties other than {@code name} are evaluated. - */ - @Override - public boolean equals(Object obj) { - return (this == obj || (obj instanceof PropertySource - && ObjectUtils.nullSafeEquals(this.name, ((PropertySource) obj).name))); - } - - /** - * Return a hash code derived from the {@code name} property of this - * {@code PropertySource} object. - */ - @Override - public int hashCode() { - return ObjectUtils.nullSafeHashCode(this.name); - } - - /** - * Produce concise output (type and name) if the current log level does not - * include debug. If debug is enabled, produce verbose output including the hash - * code of the PropertySource instance and every name/value property pair. - *

- * This variable verbosity is useful as a property source such as system - * properties or environment variables may contain an arbitrary number of - * property pairs, potentially leading to difficult to read exception and log - * messages. - * - * @see Log#isDebugEnabled() - */ - @Override - public String toString() { - if (logger.isDebugEnabled()) { - return getClass().getSimpleName() + "@" + System.identityHashCode(this) + " {name='" + this.name - + "', properties=" + this.source + "}"; - } else { - return getClass().getSimpleName() + " {name='" + this.name + "'}"; - } - } - - /** - * Return a {@code PropertySource} implementation intended for collection - * comparison purposes only. - *

- * Primarily for internal use, but given a collection of {@code PropertySource} - * objects, may be used as follows: - * - *

-     *
-     * {
-     *     @code
-     *     List> sources = new ArrayList>();
-     *     sources.add(new MapPropertySource("sourceA", mapA));
-     *     sources.add(new MapPropertySource("sourceB", mapB));
-     *     assert sources.contains(PropertySource.named("sourceA"));
-     *     assert sources.contains(PropertySource.named("sourceB"));
-     *     assert !sources.contains(PropertySource.named("sourceC"));
-     * }
-     * 
- * - * The returned {@code PropertySource} will throw - * {@code UnsupportedOperationException} if any methods other than - * {@code equals(Object)}, {@code hashCode()}, and {@code toString()} are - * called. - * - * @param name the name of the comparison {@code PropertySource} to be created - * and returned. - */ - public static PropertySource named(String name) { - return new ComparisonPropertySource(name); - } - - /** - * {@code PropertySource} to be used as a placeholder in cases where an actual - * property source cannot be eagerly initialized at application context creation - * time. For example, a {@code ServletContext}-based property source must wait - * until the {@code ServletContext} object is available to its enclosing - * {@code ApplicationContext}. In such cases, a stub should be used to hold the - * intended default position/order of the property source, then be replaced - * during context refresh. - * - * @see org.springframework.web.context.support.StandardServletEnvironment - * @see org.springframework.web.context.support.ServletContextPropertySource - */ - public static class StubPropertySource extends PropertySource { - - public StubPropertySource(String name){ - super(name, new Object()); - } - - /** - * Always returns {@code null}. 
- */ - @Override - public String getProperty(String name) { - return null; - } - } - - /** - * @see PropertySource#named(String) - */ - static class ComparisonPropertySource extends StubPropertySource { - - private static final String USAGE_ERROR = "ComparisonPropertySource instances are for use with collection comparison only"; - - public ComparisonPropertySource(String name){ - super(name); - } - - @Override - public Object getSource() { - throw new UnsupportedOperationException(USAGE_ERROR); - } - - @Override - public boolean containsProperty(String name) { - throw new UnsupportedOperationException(USAGE_ERROR); - } - - @Override - public String getProperty(String name) { - throw new UnsupportedOperationException(USAGE_ERROR); - } - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySourceLoader.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySourceLoader.java deleted file mode 100644 index 982f61b81f..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySourceLoader.java +++ /dev/null @@ -1,35 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import java.io.IOException; - -import org.springframework.core.io.support.SpringFactoriesLoader; - -/** - * Strategy interface located via {@link SpringFactoriesLoader} and used to load - * a {@link PropertySource}. - * - * @author Dave Syer - * @author Phillip Webb - */ -public interface PropertySourceLoader { - - /** - * Returns the file extensions that the loader supports (excluding the '.'). - * - * @return the file extensions - */ - String[] getFileExtensions(); - - /** - * Load the resource into a property source. - * - * @param name the name of the property source - * @param resource the resource to load - * @param profile the name of the profile to load or {@code null}. 
The profile - * can be used to load multi-document files (such as YAML). Simple property - * formats should {@code null} when asked to load a profile. - * @return a property source or {@code null} - * @throws IOException if the source cannot be loaded - */ - PropertySource load(String name, Resource resource, String profile) throws IOException; -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySources.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySources.java deleted file mode 100644 index 3cd08c9a42..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/PropertySources.java +++ /dev/null @@ -1,25 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -/** - * Holder containing one or more {@link PropertySource} objects. - * - * @author Chris Beams - * @since 3.1 - */ -public interface PropertySources extends Iterable> { - - /** - * Return whether a property source with the given name is contained. - * - * @param name the {@linkplain PropertySource#getName() name of the property source} to find - */ - boolean contains(String name); - - /** - * Return the property source with the given name, {@code null} if not found. 
- * - * @param name the {@linkplain PropertySource#getName() name of the property source} to find - */ - PropertySource get(String name); - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/Resource.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/Resource.java deleted file mode 100644 index 88301f20d3..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/Resource.java +++ /dev/null @@ -1,57 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import java.io.File; -import java.io.IOException; -import java.net.URI; -import java.net.URL; - -import org.springframework.core.io.*; -import org.springframework.core.io.ByteArrayResource; - -/** - * Interface for a resource descriptor that abstracts from the actual type of - * underlying resource, such as a file or class path resource. - *

- * An InputStream can be opened for every resource if it exists in physical - * form, but a URL or File handle can just be returned for certain resources. - * The actual behavior is implementation-specific. - * - * @author Juergen Hoeller - * @since 28.12.2003 - * @see #getInputStream() - * @see #getURL() - * @see #getURI() - * @see #getFile() - * @see WritableResource - * @see ContextResource - * @see UrlResource - * @see ClassPathResource - * @see FileSystemResource - * @see PathResource - * @see ByteArrayResource - * @see InputStreamResource - */ -public interface Resource extends InputStreamSource { - - boolean exists(); - - boolean isReadable(); - - boolean isOpen(); - - URL getURL() throws IOException; - - URI getURI() throws IOException; - - File getFile() throws IOException; - - long contentLength() throws IOException; - - long lastModified() throws IOException; - - org.springframework.core.io.Resource createRelative(String var1) throws IOException; - - String getFilename(); - - String getDescription(); -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/SpringProfileDocumentMatcher.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/SpringProfileDocumentMatcher.java deleted file mode 100644 index df25eba143..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/SpringProfileDocumentMatcher.java +++ /dev/null @@ -1,182 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import java.util.*; - -import org.springframework.core.env.Environment; -import org.springframework.util.CollectionUtils; -import org.springframework.util.StringUtils; - -/** - * {@link YamlProcessor.DocumentMatcher} backed by - * {@link Environment#getActiveProfiles()}. 
A YAML document may define a - * "spring.profiles" element as a comma-separated list of Spring profile names, - * optionally negated using the {@code !} character. If both negated and - * non-negated profiles are specified for a single document, at least one - * non-negated profile must match and no negated profiles may match. - * - * @author Dave Syer - * @author Matt Benson - * @author Phillip Webb - * @author Andy Wilkinson - */ -public class SpringProfileDocumentMatcher implements YamlProcessor.DocumentMatcher { - - private String[] activeProfiles = new String[0]; - - public SpringProfileDocumentMatcher(){ - } - - public SpringProfileDocumentMatcher(String... profiles){ - addActiveProfiles(profiles); - } - - public void addActiveProfiles(String... profiles) { - LinkedHashSet set = new LinkedHashSet<>(Arrays.asList(this.activeProfiles)); - Collections.addAll(set, profiles); - this.activeProfiles = set.toArray(new String[set.size()]); - } - - @Override - public YamlProcessor.MatchStatus matches(Properties properties) { - List profiles = extractSpringProfiles(properties); - ProfilesMatcher profilesMatcher = getProfilesMatcher(); - Set negative = extractProfiles(profiles, ProfileType.NEGATIVE); - Set positive = extractProfiles(profiles, ProfileType.POSITIVE); - if (!CollectionUtils.isEmpty(negative)) { - if (profilesMatcher.matches(negative) == YamlProcessor.MatchStatus.FOUND) { - return YamlProcessor.MatchStatus.NOT_FOUND; - } - if (CollectionUtils.isEmpty(positive)) { - return YamlProcessor.MatchStatus.FOUND; - } - } - return profilesMatcher.matches(positive); - } - - private List extractSpringProfiles(Properties properties) { - SpringProperties springProperties = new SpringProperties(); - MutablePropertySources propertySources = new MutablePropertySources(); - propertySources.addFirst(new PropertiesPropertySource("profiles", properties)); - // PropertyValues propertyValues = new PropertySourcesPropertyValues( - // propertySources); - // new 
RelaxedDataBinder(springProperties, "spring").bind(propertyValues); - // TODO - List profiles = springProperties.getProfiles(); - return profiles; - } - - private ProfilesMatcher getProfilesMatcher() { - return (this.activeProfiles.length != 0 ? new ActiveProfilesMatcher( - new HashSet<>(Arrays.asList(this.activeProfiles))) : new EmptyProfilesMatcher()); - } - - private Set extractProfiles(List profiles, ProfileType type) { - if (CollectionUtils.isEmpty(profiles)) { - return null; - } - Set extractedProfiles = new HashSet<>(); - for (String candidate : profiles) { - ProfileType candidateType = ProfileType.POSITIVE; - if (candidate.startsWith("!")) { - candidateType = ProfileType.NEGATIVE; - } - if (candidateType == type) { - extractedProfiles.add(type != ProfileType.POSITIVE ? candidate.substring(1) : candidate); - } - } - return extractedProfiles; - } - - /** - * Profile match types. - */ - enum ProfileType { - - POSITIVE, NEGATIVE - - } - - /** - * Base class for profile matchers. - */ - private abstract static class ProfilesMatcher { - - public final YamlProcessor.MatchStatus matches(Set profiles) { - if (CollectionUtils.isEmpty(profiles)) { - return YamlProcessor.MatchStatus.ABSTAIN; - } - return doMatches(profiles); - } - - protected abstract YamlProcessor.MatchStatus doMatches(Set profiles); - - } - - /** - * {@link ProfilesMatcher} that matches when a value in {@code spring.profiles} - * is also in {@code spring.profiles.active}. 
- */ - private static class ActiveProfilesMatcher extends ProfilesMatcher { - - private final Set activeProfiles; - - ActiveProfilesMatcher(Set activeProfiles){ - this.activeProfiles = activeProfiles; - } - - @Override - protected YamlProcessor.MatchStatus doMatches(Set profiles) { - if (profiles.isEmpty()) { - return YamlProcessor.MatchStatus.NOT_FOUND; - } - for (String activeProfile : this.activeProfiles) { - if (profiles.contains(activeProfile)) { - return YamlProcessor.MatchStatus.FOUND; - } - } - return YamlProcessor.MatchStatus.NOT_FOUND; - } - - } - - /** - * {@link ProfilesMatcher} that matches when {@code - * spring.profiles} is empty or contains a value with no text. - * - * @see StringUtils#hasText(String) - */ - private static class EmptyProfilesMatcher extends ProfilesMatcher { - - @Override - public YamlProcessor.MatchStatus doMatches(Set springProfiles) { - if (springProfiles.isEmpty()) { - return YamlProcessor.MatchStatus.FOUND; - } - for (String profile : springProfiles) { - if (!StringUtils.hasText(profile)) { - return YamlProcessor.MatchStatus.FOUND; - } - } - return YamlProcessor.MatchStatus.NOT_FOUND; - } - - } - - /** - * Class for binding {@code spring.profiles} property. 
- */ - static class SpringProperties { - - private List profiles = new ArrayList<>(); - - public List getProfiles() { - return this.profiles; - } - - public void setProfiles(List profiles) { - this.profiles = profiles; - } - - } - -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/YamlProcessor.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/YamlProcessor.java deleted file mode 100644 index 18c5ebb8fd..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/YamlProcessor.java +++ /dev/null @@ -1,417 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import java.io.IOException; -import java.io.Reader; -import java.util.*; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.util.Assert; -import org.yaml.snakeyaml.Yaml; -import org.yaml.snakeyaml.constructor.Constructor; -import org.yaml.snakeyaml.nodes.MappingNode; -import org.yaml.snakeyaml.parser.ParserException; -import org.yaml.snakeyaml.reader.UnicodeReader; - -/** - * Base class for YAML factories. - * - * @author Dave Syer - * @author Juergen Hoeller - * @since 4.1 - */ -public abstract class YamlProcessor { - - private final Log logger = LogFactory.getLog(getClass()); - - private ResolutionMethod resolutionMethod = ResolutionMethod.OVERRIDE; - - private Resource[] resources = new Resource[0]; - - private List documentMatchers = Collections.emptyList(); - - private boolean matchDefault = true; - - /** - * A map of document matchers allowing callers to selectively use only some of - * the documents in a YAML resource. In YAML documents are separated by - * --- lines, and each document is converted to properties before - * the match is made. E.g. - * - *

-     * environment: dev
-     * url: http://dev.bar.com
-     * name: Developer Setup
-     * ---
-     * environment: prod
-     * url:http://foo.bar.com
-     * name: My Cool App
-     * 
- * - * when mapped with - * - *
-     * setDocumentMatchers(properties -> ("prod"
-     *     .equals(properties.getProperty("environment")) ? MatchStatus.FOUND : MatchStatus.NOT_FOUND));
-     * 
- * - * would end up as - * - *
-     * environment=prod
-     * url=http://foo.bar.com
-     * name=My Cool App
-     * 
- */ - public void setDocumentMatchers(DocumentMatcher... matchers) { - this.documentMatchers = Arrays.asList(matchers); - } - - /** - * Flag indicating that a document for which all the - * {@link #setDocumentMatchers(DocumentMatcher...) document matchers} abstain - * will nevertheless match. Default is {@code true}. - */ - public void setMatchDefault(boolean matchDefault) { - this.matchDefault = matchDefault; - } - - /** - * Method to use for resolving resources. Each resource will be converted to a - * Map, so this property is used to decide which map entries to keep in the - * final output from this factory. Default is {@link ResolutionMethod#OVERRIDE}. - */ - public void setResolutionMethod(ResolutionMethod resolutionMethod) { - Assert.notNull(resolutionMethod, "ResolutionMethod must not be null"); - this.resolutionMethod = resolutionMethod; - } - - /** - * Set locations of YAML {@link Resource resources} to be loaded. - * - * @see ResolutionMethod - */ - public void setResources(Resource... resources) { - this.resources = resources; - } - - /** - * Provide an opportunity for subclasses to process the Yaml parsed from the - * supplied resources. Each resource is parsed in turn and the documents inside - * checked against the {@link #setDocumentMatchers(DocumentMatcher...) - * matchers}. If a document matches it is passed into the callback, along with - * its representation as Properties. Depending on the - * {@link #setResolutionMethod(ResolutionMethod)} not all of the documents will - * be parsed. - * - * @param callback a callback to delegate to once matching documents are found - * @see #createYaml() - */ - protected void process(MatchCallback callback) { - Yaml yaml = createYaml(); - for (Resource resource : this.resources) { - boolean found = process(callback, yaml, resource); - if (this.resolutionMethod == ResolutionMethod.FIRST_FOUND && found) { - return; - } - } - } - - /** - * Create the {@link Yaml} instance to use. 
- */ - protected Yaml createYaml() { - return new Yaml(new StrictMapAppenderConstructor()); - } - - private boolean process(MatchCallback callback, Yaml yaml, Resource resource) { - int count = 0; - try { - if (logger.isDebugEnabled()) { - logger.debug("Loading from YAML: " + resource); - } - try (Reader reader = new UnicodeReader(resource.getInputStream())) { - for (Object object : yaml.loadAll(reader)) { - if (object != null && process(asMap(object), callback)) { - count++; - if (this.resolutionMethod == ResolutionMethod.FIRST_FOUND) { - break; - } - } - } - if (logger.isDebugEnabled()) { - logger.debug( - "Loaded " + count + " document" + (count > 1 ? "s" : "") + " from YAML resource: " + resource); - } - } - } catch (IOException ex) { - handleProcessError(resource, ex); - } - return (count > 0); - } - - private void handleProcessError(Resource resource, IOException ex) { - if (this.resolutionMethod != ResolutionMethod.FIRST_FOUND - && this.resolutionMethod != ResolutionMethod.OVERRIDE_AND_IGNORE) { - throw new IllegalStateException(ex); - } - if (logger.isWarnEnabled()) { - logger.warn("Could not load map from " + resource + ": " + ex.getMessage()); - } - } - - @SuppressWarnings("unchecked") - private Map asMap(Object object) { - // YAML can have numbers as keys - Map result = new LinkedHashMap<>(); - if (!(object instanceof Map)) { - // A document can be a text literal - result.put("document", object); - return result; - } - - Map map = (Map) object; - for (Map.Entry entry : map.entrySet()) { - Object value = entry.getValue(); - if (value instanceof Map) { - value = asMap(value); - } - Object key = entry.getKey(); - if (key instanceof CharSequence) { - result.put(key.toString(), value); - } else { - // It has to be a map key in this case - result.put("[" + key.toString() + "]", value); - } - } - return result; - } - - private boolean process(Map map, MatchCallback callback) { - @SuppressWarnings("serial") - Properties properties = new Properties() { - - 
@Override - public String getProperty(String key) { - Object value = get(key); - return (value != null ? value.toString() : null); - } - }; - properties.putAll(getFlattenedMap(map)); - - if (this.documentMatchers.isEmpty()) { - if (logger.isDebugEnabled()) { - logger.debug("Merging document (no matchers set): " + map); - } - callback.process(properties, map); - return true; - } - - MatchStatus result = MatchStatus.ABSTAIN; - for (DocumentMatcher matcher : this.documentMatchers) { - MatchStatus match = matcher.matches(properties); - result = MatchStatus.getMostSpecific(match, result); - if (match == MatchStatus.FOUND) { - if (logger.isDebugEnabled()) { - logger.debug("Matched document with document matcher: " + properties); - } - callback.process(properties, map); - return true; - } - } - - if (result == MatchStatus.ABSTAIN && this.matchDefault) { - if (logger.isDebugEnabled()) { - logger.debug("Matched document with default matcher: " + map); - } - callback.process(properties, map); - return true; - } - - if (logger.isDebugEnabled()) { - logger.debug("Unmatched document: " + map); - } - return false; - } - - /** - * Return a flattened version of the given map, recursively following any nested - * Map or Collection values. Entries from the resulting map retain the same - * order as the source. When called with the Map from a {@link MatchCallback} - * the result will contain the same values as the {@link MatchCallback} - * Properties. 
- * - * @param source the source map - * @return a flattened map - * @since 4.1.3 - */ - protected final Map getFlattenedMap(Map source) { - Map result = new LinkedHashMap<>(); - buildFlattenedMap(result, source, null); - return result; - } - - private static boolean containsText(CharSequence str) { - int strLen = str.length(); - for (int i = 0; i < strLen; i++) { - if (!Character.isWhitespace(str.charAt(i))) { - return true; - } - } - return false; - } - - private void buildFlattenedMap(Map result, Map source, String path) { - for (Map.Entry entry : source.entrySet()) { - String key = entry.getKey(); - if (path != null && !path.isEmpty() && containsText(path)) { - if (key.startsWith("[")) { - key = path + key; - } else { - key = path + '.' + key; - } - } - Object value = entry.getValue(); - if (value instanceof String) { - result.put(key, value); - } else if (value instanceof Map) { - // Need a compound key - @SuppressWarnings("unchecked") - Map map = (Map) value; - buildFlattenedMap(result, map, key); - } else if (value instanceof Collection) { - // Need a compound key - @SuppressWarnings("unchecked") - Collection collection = (Collection) value; - int count = 0; - for (Object object : collection) { - buildFlattenedMap(result, Collections.singletonMap("[" + (count++) + "]", object), key); - } - } else { - result.put(key, (value != null ? value : "")); - } - } - } - - /** - * Callback interface used to process the YAML parsing results. - */ - public interface MatchCallback { - - /** - * Process the given representation of the parsing results. - * - * @param properties the properties to process (as a flattened representation - * with indexed keys in case of a collection or map) - * @param map the result map (preserving the original value structure in the - * YAML document) - */ - void process(Properties properties, Map map); - } - - /** - * Strategy interface used to test if properties match. 
- */ - public interface DocumentMatcher { - - /** - * Test if the given properties match. - * - * @param properties the properties to test - * @return the status of the match - */ - MatchStatus matches(Properties properties); - } - - /** - * Status returned from {@link DocumentMatcher#matches(Properties)} - */ - public enum MatchStatus { - - /** - * A match was found. - */ - FOUND, - - /** - * No match was found. - */ - NOT_FOUND, - - /** - * The matcher should not be considered. - */ - ABSTAIN; - - /** - * Compare two {@link MatchStatus} items, returning the most specific status. - */ - public static MatchStatus getMostSpecific(MatchStatus a, MatchStatus b) { - return (a.ordinal() < b.ordinal() ? a : b); - } - } - - /** - * Method to use for resolving resources. - */ - public enum ResolutionMethod { - - /** - * Replace values from earlier in the list. - */ - OVERRIDE, - - /** - * Replace values from earlier in the list, ignoring any failures. - */ - OVERRIDE_AND_IGNORE, - - /** - * Take the first resource in the list that exists and use just that. - */ - FIRST_FOUND - } - - /** - * A specialized {@link Constructor} that checks for duplicate keys. 
- */ - protected static class StrictMapAppenderConstructor extends Constructor { - - // Declared as public for use in subclasses - public StrictMapAppenderConstructor(){ - super(); - } - - @Override - protected Map constructMapping(MappingNode node) { - try { - return super.constructMapping(node); - } catch (IllegalStateException ex) { - throw new ParserException("while parsing MappingNode", - node.getStartMark(), - ex.getMessage(), - node.getEndMark()); - } - } - - @Override - protected Map createDefaultMap() { - final Map delegate = super.createDefaultMap(); - return new AbstractMap() { - - @Override - public Object put(Object key, Object value) { - if (delegate.containsKey(key)) { - throw new IllegalStateException("Duplicate key: " + key); - } - return delegate.put(key, value); - } - - @Override - public Set> entrySet() { - return delegate.entrySet(); - } - }; - } - } -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/YamlPropertySourceLoader.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/YamlPropertySourceLoader.java deleted file mode 100644 index 57c15d1b65..0000000000 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/config/common/YamlPropertySourceLoader.java +++ /dev/null @@ -1,80 +0,0 @@ -package com.alibaba.otter.canal.client.adapter.config.common; - -import java.io.IOException; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.regex.Pattern; - -import org.springframework.util.ClassUtils; -import org.yaml.snakeyaml.DumperOptions; -import org.yaml.snakeyaml.Yaml; -import org.yaml.snakeyaml.nodes.Tag; -import org.yaml.snakeyaml.representer.Representer; -import org.yaml.snakeyaml.resolver.Resolver; - -/** - * Strategy to load '.yml' (or '.yaml') files into a {@link PropertySource}. 
- * - * @author Dave Syer - * @author Phillip Webb - * @author Andy Wilkinson - */ -public class YamlPropertySourceLoader implements PropertySourceLoader { - - @Override - public String[] getFileExtensions() { - return new String[] { "yml", "yaml" }; - } - - @Override - public PropertySource load(String name, Resource resource, String profile) throws IOException { - if (ClassUtils.isPresent("org.yaml.snakeyaml.Yaml", null)) { - Processor processor = new Processor(resource, profile); - Map source = processor.process(); - if (!source.isEmpty()) { - return new MapPropertySource(name, source); - } - } - return null; - } - - /** - * {@link YamlProcessor} to create a {@link Map} containing the property values. - * Similar to {@link YamlPropertiesFactoryBean} but retains the order of - * entries. - */ - private static class Processor extends YamlProcessor { - - Processor(Resource resource, String profile){ - if (profile == null) { - setMatchDefault(true); - setDocumentMatchers(new SpringProfileDocumentMatcher()); - } else { - setMatchDefault(false); - setDocumentMatchers(new SpringProfileDocumentMatcher(profile)); - } - setResources(resource); - } - - @Override - protected Yaml createYaml() { - return new Yaml(new StrictMapAppenderConstructor(), new Representer(), new DumperOptions(), new Resolver() { - - @Override - public void addImplicitResolver(Tag tag, Pattern regexp, String first) { - if (tag == Tag.TIMESTAMP) { - return; - } - super.addImplicitResolver(tag, regexp, first); - } - }); - } - - public Map process() { - final Map result = new LinkedHashMap<>(); - process((properties, map) -> result.putAll(getFlattenedMap(map))); - return result; - } - - } -} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/FileName2KeyMapping.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/FileName2KeyMapping.java new file mode 100644 index 0000000000..000da89a15 --- /dev/null +++ 
b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/FileName2KeyMapping.java @@ -0,0 +1,28 @@ +package com.alibaba.otter.canal.client.adapter.support; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Created by @author zhuchao on @date 2021/11/11. + */ +public class FileName2KeyMapping { + + private static Map MAP = new ConcurrentHashMap<>(); + + public static void register(String type, String fileName, String key) { + MAP.putIfAbsent(join(type, fileName), key); + } + + public static void unregister(String type, String fileName) { + MAP.remove(join(type, fileName)); + } + + public static String getKey(String type, String fileName) { + return MAP.get(join(type, fileName)); + } + + private static String join(String type, String fileName) { + return type + "|" + fileName; + } +} diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/MappingConfigsLoader.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/MappingConfigsLoader.java index a7c8d7d6cc..9eca0a5461 100644 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/MappingConfigsLoader.java +++ b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/MappingConfigsLoader.java @@ -55,7 +55,7 @@ public static String loadConfig(String name) { } if (filePath.exists()) { String fileName = filePath.getName(); - if (!fileName.endsWith(".yml")) { + if (!(fileName.endsWith(".yml") || fileName.endsWith(".yaml"))) { return null; } try (InputStream in = new FileInputStream(filePath)) { diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Util.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Util.java index 553ab664de..e81970672a 100644 --- a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Util.java +++ 
b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/Util.java @@ -30,6 +30,8 @@ public class Util { private static final Logger logger = LoggerFactory.getLogger(Util.class); + public static final String AUTO_GENERATED_PREFIX = "AUTO_GENERATED_"; + /** * 通过DS执行sql */ diff --git a/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/YamlUtils.java b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/YamlUtils.java new file mode 100644 index 0000000000..0c95410453 --- /dev/null +++ b/client-adapter/common/src/main/java/com/alibaba/otter/canal/client/adapter/support/YamlUtils.java @@ -0,0 +1,96 @@ +package com.alibaba.otter.canal.client.adapter.support; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import org.apache.commons.lang.StringUtils; +import org.springframework.boot.context.properties.bind.Bindable; +import org.springframework.boot.context.properties.bind.Binder; +import org.springframework.boot.context.properties.source.ConfigurationPropertySource; +import org.springframework.boot.context.properties.source.MapConfigurationPropertySource; +import org.springframework.boot.env.YamlPropertySourceLoader; +import org.springframework.boot.origin.OriginTrackedValue; +import org.springframework.core.env.PropertySource; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; + +public class YamlUtils { + + public static T resourceYmlToObj(String resource, String prefix, Class clazz) { + ClassPathResource classPathResource = new ClassPathResource(resource); + + String content; + try (InputStream inputStream = classPathResource.getInputStream()) { + + ByteArrayOutputStream result = new ByteArrayOutputStream(); 
+ byte[] buffer = new byte[1024]; + int length; + while ((length = inputStream.read(buffer)) != -1) { + result.write(buffer, 0, length); + } + content = result.toString("UTF-8"); + } catch (IOException e) { + throw new RuntimeException(e); + } + return ymlToObj(prefix, content, clazz); + } + + public static T ymlToObj(String content, Class clazz) { + return ymlToObj("", content, clazz, null, null); + } + + public static T ymlToObj(String prefix, String content, Class clazz) { + return ymlToObj(prefix, content, clazz, null, null); + } + + public static T ymlToObj(String prefix, String content, Class clazz, String charset) { + return ymlToObj(prefix, content, clazz, charset, null); + } + + public static T ymlToObj(String prefix, String content, Class clazz, String charset, + Properties baseProperties) { + try { + prefix = StringUtils.trimToEmpty(prefix); + byte[] contentBytes; + if (charset == null) { + contentBytes = content.getBytes(StandardCharsets.UTF_8); + } else { + contentBytes = content.getBytes(charset); + } + YamlPropertySourceLoader propertySourceLoader = new YamlPropertySourceLoader(); + Resource configResource = new ByteArrayResource(contentBytes); + List> propertySources = propertySourceLoader.load("manualBindConfig", configResource); + + if (propertySources == null || propertySources.isEmpty()) { + return null; + } + + PropertySource propertySource = propertySources.get(0); + + Properties properties = new Properties(); + if (baseProperties != null) { + properties.putAll(baseProperties); + } + + properties.putAll((Map) propertySource.getSource()); + + for (Map.Entry entry : properties.entrySet()) { + Object value = entry.getValue(); + if (value instanceof OriginTrackedValue) { + entry.setValue(((OriginTrackedValue) value).getValue()); + } + } + + ConfigurationPropertySource sources = new MapConfigurationPropertySource(properties); + Binder binder = new Binder(sources); + return binder.bind(prefix, Bindable.of(clazz)).get(); + } catch (Exception e) { + 
throw new RuntimeException(e); + } + } +} diff --git a/client-adapter/common/src/test/java/com/alibaba/otter/canal/client/adapter/support/YamlUtilsTest.java b/client-adapter/common/src/test/java/com/alibaba/otter/canal/client/adapter/support/YamlUtilsTest.java new file mode 100644 index 0000000000..2246c3c1c8 --- /dev/null +++ b/client-adapter/common/src/test/java/com/alibaba/otter/canal/client/adapter/support/YamlUtilsTest.java @@ -0,0 +1,245 @@ +package com.alibaba.otter.canal.client.adapter.support; + +import org.junit.Assert; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Value; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Properties; + +public class YamlUtilsTest { + + + + @Test + public void testLoadConfigToYml() { + String configStr="dataSourceKey: defaultDS\n" + + "destination: example\n" + + "groupId: g1\n" + + "outerAdapterKey: mysql1\n" + + "concurrent: true\n" + + "dbMapping:\n" + + " _id: _id\n" + + " database: mytest\n" + + " table: user\n" + + " targetTable: mytest2.user\n" + + " targetPk:\n" + + " id: id\n" + + "# mapAll: true\n" + + " targetColumns:\n" + + " id:\n" + + " name:\n" + + " role_id:\n" + + " c_time:\n" + + " test1:\n" + + " etlCondition: \"where c_time>={}\"\n" + + " commitBatch: 3000 # 批量提交的大小"; + + MappingConfig config = YamlUtils.ymlToObj(null, configStr, MappingConfig.class, null, new Properties()); + + Assert.assertNotNull(config); + Assert.assertEquals(config.getDbMapping().getId(), "_id"); + Assert.assertEquals(config.getDestination(), "example"); + Assert.assertEquals(config.getOuterAdapterKey(), "mysql1"); + Assert.assertEquals(config.getDbMapping().getDatabase(), "mytest"); + Assert.assertEquals(config.getDbMapping().getTargetColumns().size(), 5); + } + + private static class MappingConfig { + private String dataSourceKey; + + private String destination; + + private String groupId; + + private String outerAdapterKey; + + private boolean concurrent = false; + + 
private DbMapping dbMapping; + + public String getDataSourceKey() { + return dataSourceKey; + } + + public void setDataSourceKey(String dataSourceKey) { + this.dataSourceKey = dataSourceKey; + } + + public String getDestination() { + return destination; + } + + public void setDestination(String destination) { + this.destination = destination; + } + + public String getGroupId() { + return groupId; + } + + public void setGroupId(String groupId) { + this.groupId = groupId; + } + + public String getOuterAdapterKey() { + return outerAdapterKey; + } + + public void setOuterAdapterKey(String outerAdapterKey) { + this.outerAdapterKey = outerAdapterKey; + } + + public boolean isConcurrent() { + return concurrent; + } + + public void setConcurrent(boolean concurrent) { + this.concurrent = concurrent; + } + + public DbMapping getDbMapping() { + return dbMapping; + } + + public void setDbMapping(DbMapping dbMapping) { + this.dbMapping = dbMapping; + } + } + + private static class DbMapping { + + @Value("${_id}") + private String id ; + private boolean mirrorDb = false; // 是否镜像库 + private String database; // 数据库名或schema名 + private String table; // 表名 + private Map targetPk = new LinkedHashMap<>(); // 目标表主键字段 + private boolean mapAll = false; // 映射所有字段 + private String targetDb; // 目标库名 + private String targetTable; // 目标表名 + private Map targetColumns; // 目标表字段映射 + + private boolean caseInsensitive = false; // 目标表不区分大小写,默认是否 + + private String etlCondition; // etl条件sql + + private int readBatch = 5000; + private int commitBatch = 5000; // etl等批量提交大小 + + private Map allMapColumns; + + public boolean isMirrorDb() { + return mirrorDb; + } + + public void setMirrorDb(boolean mirrorDb) { + this.mirrorDb = mirrorDb; + } + + public String getDatabase() { + return database; + } + + public void setDatabase(String database) { + this.database = database; + } + + public String getTable() { + return table; + } + + public void setTable(String table) { + this.table = table; + } + + public Map 
getTargetPk() { + return targetPk; + } + + public void setTargetPk(Map targetPk) { + this.targetPk = targetPk; + } + + public boolean isMapAll() { + return mapAll; + } + + public void setMapAll(boolean mapAll) { + this.mapAll = mapAll; + } + + public String getTargetDb() { + return targetDb; + } + + public void setTargetDb(String targetDb) { + this.targetDb = targetDb; + } + + public String getTargetTable() { + return targetTable; + } + + public void setTargetTable(String targetTable) { + this.targetTable = targetTable; + } + + public Map getTargetColumns() { + return targetColumns; + } + + public void setTargetColumns(Map targetColumns) { + this.targetColumns = targetColumns; + } + + public boolean isCaseInsensitive() { + return caseInsensitive; + } + + public void setCaseInsensitive(boolean caseInsensitive) { + this.caseInsensitive = caseInsensitive; + } + + public String getEtlCondition() { + return etlCondition; + } + + public void setEtlCondition(String etlCondition) { + this.etlCondition = etlCondition; + } + + public int getReadBatch() { + return readBatch; + } + + public void setReadBatch(int readBatch) { + this.readBatch = readBatch; + } + + public int getCommitBatch() { + return commitBatch; + } + + public void setCommitBatch(int commitBatch) { + this.commitBatch = commitBatch; + } + + public Map getAllMapColumns() { + return allMapColumns; + } + + public void setAllMapColumns(Map allMapColumns) { + this.allMapColumns = allMapColumns; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + } +} diff --git a/client-adapter/es6x/pom.xml b/client-adapter/es6x/pom.xml index 43936f0228..bf1029460f 100644 --- a/client-adapter/es6x/pom.xml +++ b/client-adapter/es6x/pom.xml @@ -3,7 +3,7 @@ canal.client-adapter com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml 4.0.0 @@ -27,22 +27,22 @@ org.elasticsearch elasticsearch - 6.4.3 + 6.8.22 org.elasticsearch.client transport - 6.4.3 + 6.8.22 
org.elasticsearch.client elasticsearch-rest-client - 6.4.3 + 6.8.22 org.elasticsearch.client elasticsearch-rest-high-level-client - 6.4.3 + 6.8.22 diff --git a/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/ES6xAdapter.java b/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/ES6xAdapter.java index 85ac4085d6..647b62b53e 100644 --- a/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/ES6xAdapter.java +++ b/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/ES6xAdapter.java @@ -59,12 +59,12 @@ public void init(OuterAdapterConfig configuration, Properties envProperties) { public Map count(String task) { ESSyncConfig config = esSyncConfig.get(task); ESSyncConfig.ESMapping mapping = config.getEsMapping(); - SearchResponse response = this.esConnection.new ESSearchRequest(mapping.get_index(), mapping.get_type()).size(0) + SearchResponse response = this.esConnection.new ESSearchRequest(mapping.getIndex(), mapping.getType()).size(0) .getResponse(); long rowCount = response.getHits().getTotalHits(); Map res = new LinkedHashMap<>(); - res.put("esIndex", mapping.get_index()); + res.put("esIndex", mapping.getIndex()); res.put("count", rowCount); return res; } diff --git a/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/etl/ESEtlService.java b/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/etl/ESEtlService.java index 40b4a4abf2..b15719e088 100644 --- a/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/etl/ESEtlService.java +++ b/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/etl/ESEtlService.java @@ -51,7 +51,7 @@ public ESEtlService(ESConnection esConnection, ESSyncConfig config){ public EtlResult importData(List params) { ESMapping mapping = config.getEsMapping(); - logger.info("start etl to import data to index: {}", mapping.get_index()); + 
logger.info("start etl to import data to index: {}", mapping.getIndex()); String sql = mapping.getSql(); return importData(sql, params); } @@ -78,7 +78,7 @@ protected boolean executeSqlImport(DataSource ds, String sql, List value } // 如果是主键字段则不插入 - if (fieldItem.getFieldName().equals(mapping.get_id())) { + if (fieldItem.getFieldName().equals(mapping.getId())) { idVal = esTemplate.getValFromRS(mapping, rs, fieldName, fieldName); } else { Object val = esTemplate.getValFromRS(mapping, rs, fieldName, fieldName); @@ -117,8 +117,8 @@ protected boolean executeSqlImport(DataSource ds, String sql, List value if (idVal != null) { String parentVal = (String) esFieldData.remove("$parent_routing"); if (mapping.isUpsert()) { - ESUpdateRequest esUpdateRequest = this.esConnection.new ES6xUpdateRequest(mapping.get_index(), - mapping.get_type(), + ESUpdateRequest esUpdateRequest = this.esConnection.new ES6xUpdateRequest(mapping.getIndex(), + mapping.getType(), idVal.toString()).setDoc(esFieldData).setDocAsUpsert(true); if (StringUtils.isNotEmpty(parentVal)) { @@ -127,8 +127,8 @@ protected boolean executeSqlImport(DataSource ds, String sql, List value esBulkRequest.add(esUpdateRequest); } else { - ESIndexRequest esIndexRequest = this.esConnection.new ES6xIndexRequest(mapping.get_index(), - mapping.get_type(), + ESIndexRequest esIndexRequest = this.esConnection.new ES6xIndexRequest(mapping.getIndex(), + mapping.getType(), idVal.toString()).setSource(esFieldData); if (StringUtils.isNotEmpty(parentVal)) { esIndexRequest.setRouting(parentVal); @@ -137,12 +137,12 @@ protected boolean executeSqlImport(DataSource ds, String sql, List value } } else { idVal = esFieldData.get(mapping.getPk()); - ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.get_index(), - mapping.get_type()).setQuery(QueryBuilders.termQuery(mapping.getPk(), idVal)).size(10000); + ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.getIndex(), + 
mapping.getType()).setQuery(QueryBuilders.termQuery(mapping.getPk(), idVal)).size(10000); SearchResponse response = esSearchRequest.getResponse(); for (SearchHit hit : response.getHits()) { - ESUpdateRequest esUpdateRequest = this.esConnection.new ES6xUpdateRequest(mapping.get_index(), - mapping.get_type(), + ESUpdateRequest esUpdateRequest = this.esConnection.new ES6xUpdateRequest(mapping.getIndex(), + mapping.getType(), hit.getId()).setDoc(esFieldData); esBulkRequest.add(esUpdateRequest); } @@ -160,7 +160,7 @@ protected boolean executeSqlImport(DataSource ds, String sql, List value (System.currentTimeMillis() - batchBegin), (System.currentTimeMillis() - esBatchBegin), esBulkRequest.numberOfActions(), - mapping.get_index()); + mapping.getIndex()); } batchBegin = System.currentTimeMillis(); esBulkRequest.resetBulk(); @@ -180,12 +180,12 @@ protected boolean executeSqlImport(DataSource ds, String sql, List value (System.currentTimeMillis() - batchBegin), (System.currentTimeMillis() - esBatchBegin), esBulkRequest.numberOfActions(), - mapping.get_index()); + mapping.getIndex()); } } } catch (Exception e) { logger.error(e.getMessage(), e); - errMsg.add(mapping.get_index() + " etl failed! ==>" + e.getMessage()); + errMsg.add(mapping.getIndex() + " etl failed! 
==>" + e.getMessage()); throw new RuntimeException(e); } return count; diff --git a/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/support/ES6xTemplate.java b/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/support/ES6xTemplate.java index ee74f46632..44617dc124 100644 --- a/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/support/ES6xTemplate.java +++ b/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/support/ES6xTemplate.java @@ -66,19 +66,19 @@ public void resetBulkRequestBuilder() { @Override public void insert(ESSyncConfig.ESMapping mapping, Object pkVal, Map esFieldData) { - if (mapping.get_id() != null) { + if (mapping.getId() != null) { String parentVal = (String) esFieldData.remove("$parent_routing"); if (mapping.isUpsert()) { - ESUpdateRequest updateRequest = esConnection.new ES6xUpdateRequest(mapping.get_index(), - mapping.get_type(), + ESUpdateRequest updateRequest = esConnection.new ES6xUpdateRequest(mapping.getIndex(), + mapping.getType(), pkVal.toString()).setDoc(esFieldData).setDocAsUpsert(true); if (StringUtils.isNotEmpty(parentVal)) { updateRequest.setRouting(parentVal); } getBulk().add(updateRequest); } else { - ESIndexRequest indexRequest = esConnection.new ES6xIndexRequest(mapping.get_index(), - mapping.get_type(), + ESIndexRequest indexRequest = esConnection.new ES6xIndexRequest(mapping.getIndex(), + mapping.getType(), pkVal.toString()).setSource(esFieldData); if (StringUtils.isNotEmpty(parentVal)) { indexRequest.setRouting(parentVal); @@ -87,13 +87,13 @@ public void insert(ESSyncConfig.ESMapping mapping, Object pkVal, Map paramsTmp, Ma @Override public void delete(ESSyncConfig.ESMapping mapping, Object pkVal, Map esFieldData) { - if (mapping.get_id() != null) { - ESDeleteRequest esDeleteRequest = this.esConnection.new ES6xDeleteRequest(mapping.get_index(), - mapping.get_type(), + if (mapping.getId() != null) { + 
ESDeleteRequest esDeleteRequest = this.esConnection.new ES6xDeleteRequest(mapping.getIndex(), + mapping.getType(), pkVal.toString()); getBulk().add(esDeleteRequest); commitBulk(); } else { - ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.get_index(), - mapping.get_type()).setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal)).size(10000); + ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.getIndex(), + mapping.getType()).setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal)).size(10000); SearchResponse response = esSearchRequest.getResponse(); for (SearchHit hit : response.getHits()) { - ESUpdateRequest esUpdateRequest = this.esConnection.new ES6xUpdateRequest(mapping.get_index(), - mapping.get_type(), + ESUpdateRequest esUpdateRequest = this.esConnection.new ES6xUpdateRequest(mapping.getIndex(), + mapping.getType(), hit.getId()).setDoc(esFieldData); getBulk().add(esUpdateRequest); commitBulk(); @@ -207,7 +207,7 @@ public Object getValFromRS(ESSyncConfig.ESMapping mapping, ResultSet resultSet, public Object getESDataFromRS(ESSyncConfig.ESMapping mapping, ResultSet resultSet, Map esFieldData) throws SQLException { SchemaItem schemaItem = mapping.getSchemaItem(); - String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id(); + String idFieldName = mapping.getId() == null ? 
mapping.getPk() : mapping.getId(); Object resultIdVal = null; for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { Object value = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName()); @@ -216,7 +216,7 @@ public Object getESDataFromRS(ESSyncConfig.ESMapping mapping, ResultSet resultSe resultIdVal = value; } - if (!fieldItem.getFieldName().equals(mapping.get_id()) + if (!fieldItem.getFieldName().equals(mapping.getId()) && !mapping.getSkips().contains(fieldItem.getFieldName())) { esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), value); } @@ -231,7 +231,7 @@ public Object getESDataFromRS(ESSyncConfig.ESMapping mapping, ResultSet resultSe @Override public Object getIdValFromRS(ESSyncConfig.ESMapping mapping, ResultSet resultSet) throws SQLException { SchemaItem schemaItem = mapping.getSchemaItem(); - String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id(); + String idFieldName = mapping.getId() == null ? mapping.getPk() : mapping.getId(); Object resultIdVal = null; for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { Object value = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName()); @@ -248,7 +248,7 @@ public Object getIdValFromRS(ESSyncConfig.ESMapping mapping, ResultSet resultSet public Object getESDataFromRS(ESSyncConfig.ESMapping mapping, ResultSet resultSet, Map dmlOld, Map esFieldData) throws SQLException { SchemaItem schemaItem = mapping.getSchemaItem(); - String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id(); + String idFieldName = mapping.getId() == null ? 
mapping.getPk() : mapping.getId(); Object resultIdVal = null; for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { if (fieldItem.getFieldName().equals(idFieldName)) { @@ -294,7 +294,7 @@ public Object getValFromData(ESSyncConfig.ESMapping mapping, Map public Object getESDataFromDmlData(ESSyncConfig.ESMapping mapping, Map dmlData, Map esFieldData) { SchemaItem schemaItem = mapping.getSchemaItem(); - String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id(); + String idFieldName = mapping.getId() == null ? mapping.getPk() : mapping.getId(); Object resultIdVal = null; for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { String columnName = fieldItem.getColumnItems().iterator().next().getColumnName(); @@ -304,7 +304,7 @@ public Object getESDataFromDmlData(ESSyncConfig.ESMapping mapping, Map dmlData, + public Object getESDataFromDmlData(ESSyncConfig.ESMapping mapping,String owner, Map dmlData, Map dmlOld, Map esFieldData) { SchemaItem schemaItem = mapping.getSchemaItem(); - String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id(); + String idFieldName = mapping.getId() == null ? 
mapping.getPk() : mapping.getId(); Object resultIdVal = null; for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { - String columnName = fieldItem.getColumnItems().iterator().next().getColumnName(); + ColumnItem columnItem = fieldItem.getColumnItems().iterator().next(); + if (!columnItem.getOwner().equals(owner)) { + continue; + } + String columnName = columnItem.getColumnName(); if (fieldItem.getFieldName().equals(idFieldName)) { resultIdVal = getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName); @@ -349,19 +353,19 @@ private void commitBulk() { } private void append4Update(ESMapping mapping, Object pkVal, Map esFieldData) { - if (mapping.get_id() != null) { + if (mapping.getId() != null) { String parentVal = (String) esFieldData.remove("$parent_routing"); if (mapping.isUpsert()) { - ESUpdateRequest esUpdateRequest = this.esConnection.new ES6xUpdateRequest(mapping.get_index(), - mapping.get_type(), + ESUpdateRequest esUpdateRequest = this.esConnection.new ES6xUpdateRequest(mapping.getIndex(), + mapping.getType(), pkVal.toString()).setDoc(esFieldData).setDocAsUpsert(true); if (StringUtils.isNotEmpty(parentVal)) { esUpdateRequest.setRouting(parentVal); } getBulk().add(esUpdateRequest); } else { - ESUpdateRequest esUpdateRequest = this.esConnection.new ES6xUpdateRequest(mapping.get_index(), - mapping.get_type(), + ESUpdateRequest esUpdateRequest = this.esConnection.new ES6xUpdateRequest(mapping.getIndex(), + mapping.getType(), pkVal.toString()).setDoc(esFieldData); if (StringUtils.isNotEmpty(parentVal)) { esUpdateRequest.setRouting(parentVal); @@ -369,12 +373,12 @@ private void append4Update(ESMapping mapping, Object pkVal, Map getBulk().add(esUpdateRequest); } } else { - ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.get_index(), - mapping.get_type()).setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal)).size(10000); + ESSearchRequest esSearchRequest = this.esConnection.new 
ESSearchRequest(mapping.getIndex(), + mapping.getType()).setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal)).size(10000); SearchResponse response = esSearchRequest.getResponse(); for (SearchHit hit : response.getHits()) { - ESUpdateRequest esUpdateRequest = this.esConnection.new ES6xUpdateRequest(mapping.get_index(), - mapping.get_type(), + ESUpdateRequest esUpdateRequest = this.esConnection.new ES6xUpdateRequest(mapping.getIndex(), + mapping.getType(), hit.getId()).setDoc(esFieldData); getBulk().add(esUpdateRequest); } @@ -390,15 +394,15 @@ private void append4Update(ESMapping mapping, Object pkVal, Map */ @SuppressWarnings("unchecked") private String getEsType(ESMapping mapping, String fieldName) { - String key = mapping.get_index() + "-" + mapping.get_type(); + String key = mapping.getIndex() + "-" + mapping.getType(); Map fieldType = esFieldTypes.get(key); if (fieldType != null) { return fieldType.get(fieldName); } else { - MappingMetaData mappingMetaData = esConnection.getMapping(mapping.get_index(), mapping.get_type()); + MappingMetaData mappingMetaData = esConnection.getMapping(mapping.getIndex(), mapping.getType()); if (mappingMetaData == null) { - throw new IllegalArgumentException("Not found the mapping info of index: " + mapping.get_index()); + throw new IllegalArgumentException("Not found the mapping info of index: " + mapping.getIndex()); } fieldType = new LinkedHashMap<>(); diff --git a/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/support/ESConnection.java b/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/support/ESConnection.java index 266e0b5b26..b63fc9a55c 100644 --- a/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/support/ESConnection.java +++ b/client-adapter/es6x/src/main/java/com/alibaba/otter/canal/client/adapter/es6x/support/ESConnection.java @@ -150,7 +150,13 @@ public MappingMetaData getMapping(String index, String type) { 
logger.error(e.getMessage(), e); return null; } - mappingMetaData = mappings.get(index).get(type); + + //通过别名查询mapping返回的是真实索引名称,mappings.get(index)返回null,为兼容别名情况修改如下: + ImmutableOpenMap esIndex = mappings.get(index); + if(esIndex == null){ + esIndex = mappings.valuesIt().next(); + } + mappingMetaData = esIndex.get(type); } return mappingMetaData; } diff --git a/client-adapter/es6x/src/test/java/com/alibaba/otter/canal/client/adapter/es6x/test/ConfigLoadTest.java b/client-adapter/es6x/src/test/java/com/alibaba/otter/canal/client/adapter/es6x/test/ConfigLoadTest.java index 2d5c74b677..a190cba4af 100644 --- a/client-adapter/es6x/src/test/java/com/alibaba/otter/canal/client/adapter/es6x/test/ConfigLoadTest.java +++ b/client-adapter/es6x/src/test/java/com/alibaba/otter/canal/client/adapter/es6x/test/ConfigLoadTest.java @@ -29,9 +29,9 @@ public void testLoad() { Assert.assertNotNull(config); Assert.assertEquals("defaultDS", config.getDataSourceKey()); ESSyncConfig.ESMapping esMapping = config.getEsMapping(); - Assert.assertEquals("mytest_user", esMapping.get_index()); - Assert.assertEquals("_doc", esMapping.get_type()); - Assert.assertEquals("id", esMapping.get_id()); + Assert.assertEquals("mytest_user", esMapping.getIndex()); + Assert.assertEquals("_doc", esMapping.getType()); + Assert.assertEquals("id", esMapping.getId()); Assert.assertNotNull(esMapping.getSql()); // Map> dbTableEsSyncConfig = diff --git a/client-adapter/es6x/src/test/java/com/alibaba/otter/canal/client/adapter/es6x/test/sync/Common.java b/client-adapter/es6x/src/test/java/com/alibaba/otter/canal/client/adapter/es6x/test/sync/Common.java index 4f11b4618b..b4664b7559 100644 --- a/client-adapter/es6x/src/test/java/com/alibaba/otter/canal/client/adapter/es6x/test/sync/Common.java +++ b/client-adapter/es6x/src/test/java/com/alibaba/otter/canal/client/adapter/es6x/test/sync/Common.java @@ -5,6 +5,7 @@ import java.sql.Statement; import java.util.HashMap; import java.util.Map; +import java.util.Properties; 
import javax.sql.DataSource; @@ -26,7 +27,7 @@ public static ES6xAdapter init() { outerAdapterConfig.setProperties(properties); ES6xAdapter esAdapter = new ES6xAdapter(); - esAdapter.init(outerAdapterConfig, null); + esAdapter.init(outerAdapterConfig, new Properties()); return esAdapter; } diff --git a/client-adapter/es7x/pom.xml b/client-adapter/es7x/pom.xml index 472934de09..31ab1c70b8 100644 --- a/client-adapter/es7x/pom.xml +++ b/client-adapter/es7x/pom.xml @@ -3,7 +3,7 @@ canal.client-adapter com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml 4.0.0 diff --git a/client-adapter/es7x/src/main/java/com/alibaba/otter/canal/client/adapter/es7x/ES7xAdapter.java b/client-adapter/es7x/src/main/java/com/alibaba/otter/canal/client/adapter/es7x/ES7xAdapter.java index 18fd572e62..94ad1c1730 100644 --- a/client-adapter/es7x/src/main/java/com/alibaba/otter/canal/client/adapter/es7x/ES7xAdapter.java +++ b/client-adapter/es7x/src/main/java/com/alibaba/otter/canal/client/adapter/es7x/ES7xAdapter.java @@ -59,11 +59,11 @@ public void init(OuterAdapterConfig configuration, Properties envProperties) { public Map count(String task) { ESSyncConfig config = esSyncConfig.get(task); ESSyncConfig.ESMapping mapping = config.getEsMapping(); - SearchResponse response = this.esConnection.new ESSearchRequest(mapping.get_index()).size(0).getResponse(); + SearchResponse response = this.esConnection.new ESSearchRequest(mapping.getIndex()).size(0).getResponse(); long rowCount = response.getHits().getTotalHits().value; Map res = new LinkedHashMap<>(); - res.put("esIndex", mapping.get_index()); + res.put("esIndex", mapping.getIndex()); res.put("count", rowCount); return res; } diff --git a/client-adapter/es7x/src/main/java/com/alibaba/otter/canal/client/adapter/es7x/etl/ESEtlService.java b/client-adapter/es7x/src/main/java/com/alibaba/otter/canal/client/adapter/es7x/etl/ESEtlService.java index 05b20cf970..a70ea53578 100644 --- 
a/client-adapter/es7x/src/main/java/com/alibaba/otter/canal/client/adapter/es7x/etl/ESEtlService.java +++ b/client-adapter/es7x/src/main/java/com/alibaba/otter/canal/client/adapter/es7x/etl/ESEtlService.java @@ -51,7 +51,7 @@ public ESEtlService(ESConnection esConnection, ESSyncConfig config){ public EtlResult importData(List params) { ESMapping mapping = config.getEsMapping(); - logger.info("start etl to import data to index: {}", mapping.get_index()); + logger.info("start etl to import data to index: {}", mapping.getIndex()); String sql = mapping.getSql(); return importData(sql, params); } @@ -78,7 +78,7 @@ protected boolean executeSqlImport(DataSource ds, String sql, List value } // 如果是主键字段则不插入 - if (fieldItem.getFieldName().equals(mapping.get_id())) { + if (fieldItem.getFieldName().equals(mapping.getId())) { idVal = esTemplate.getValFromRS(mapping, rs, fieldName, fieldName); } else { Object val = esTemplate.getValFromRS(mapping, rs, fieldName, fieldName); @@ -118,7 +118,7 @@ protected boolean executeSqlImport(DataSource ds, String sql, List value String parentVal = (String) esFieldData.remove("$parent_routing"); if (mapping.isUpsert()) { ESUpdateRequest esUpdateRequest = this.esConnection.new ES7xUpdateRequest( - mapping.get_index(), + mapping.getIndex(), idVal.toString()).setDoc(esFieldData).setDocAsUpsert(true); if (StringUtils.isNotEmpty(parentVal)) { @@ -128,7 +128,7 @@ protected boolean executeSqlImport(DataSource ds, String sql, List value esBulkRequest.add(esUpdateRequest); } else { ESIndexRequest esIndexRequest = this.esConnection.new ES7xIndexRequest( - mapping.get_index(), + mapping.getIndex(), idVal.toString()).setSource(esFieldData); if (StringUtils.isNotEmpty(parentVal)) { esIndexRequest.setRouting(parentVal); @@ -137,13 +137,13 @@ protected boolean executeSqlImport(DataSource ds, String sql, List value } } else { idVal = esFieldData.get(mapping.getPk()); - ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.get_index()) 
+ ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.getIndex()) .setQuery(QueryBuilders.termQuery(mapping.getPk(), idVal)) .size(10000); SearchResponse response = esSearchRequest.getResponse(); for (SearchHit hit : response.getHits()) { ESUpdateRequest esUpdateRequest = this.esConnection.new ES7xUpdateRequest( - mapping.get_index(), + mapping.getIndex(), hit.getId()).setDoc(esFieldData); esBulkRequest.add(esUpdateRequest); } @@ -162,7 +162,7 @@ protected boolean executeSqlImport(DataSource ds, String sql, List value (System.currentTimeMillis() - batchBegin), (System.currentTimeMillis() - esBatchBegin), esBulkRequest.numberOfActions(), - mapping.get_index()); + mapping.getIndex()); } batchBegin = System.currentTimeMillis(); esBulkRequest.resetBulk(); @@ -182,12 +182,12 @@ protected boolean executeSqlImport(DataSource ds, String sql, List value (System.currentTimeMillis() - batchBegin), (System.currentTimeMillis() - esBatchBegin), esBulkRequest.numberOfActions(), - mapping.get_index()); + mapping.getIndex()); } } } catch (Exception e) { logger.error(e.getMessage(), e); - errMsg.add(mapping.get_index() + " etl failed! ==>" + e.getMessage()); + errMsg.add(mapping.getIndex() + " etl failed! 
==>" + e.getMessage()); throw new RuntimeException(e); } return count; diff --git a/client-adapter/es7x/src/main/java/com/alibaba/otter/canal/client/adapter/es7x/support/ES7xTemplate.java b/client-adapter/es7x/src/main/java/com/alibaba/otter/canal/client/adapter/es7x/support/ES7xTemplate.java index 8dd3776121..4f2ff06131 100644 --- a/client-adapter/es7x/src/main/java/com/alibaba/otter/canal/client/adapter/es7x/support/ES7xTemplate.java +++ b/client-adapter/es7x/src/main/java/com/alibaba/otter/canal/client/adapter/es7x/support/ES7xTemplate.java @@ -66,17 +66,17 @@ public void resetBulkRequestBuilder() { @Override public void insert(ESMapping mapping, Object pkVal, Map esFieldData) { - if (mapping.get_id() != null) { + if (mapping.getId() != null) { String parentVal = (String) esFieldData.remove("$parent_routing"); if (mapping.isUpsert()) { - ESUpdateRequest updateRequest = esConnection.new ES7xUpdateRequest(mapping.get_index(), + ESUpdateRequest updateRequest = esConnection.new ES7xUpdateRequest(mapping.getIndex(), pkVal.toString()).setDoc(esFieldData).setDocAsUpsert(true); if (StringUtils.isNotEmpty(parentVal)) { updateRequest.setRouting(parentVal); } getBulk().add(updateRequest); } else { - ESIndexRequest indexRequest = esConnection.new ES7xIndexRequest(mapping.get_index(), pkVal.toString()) + ESIndexRequest indexRequest = esConnection.new ES7xIndexRequest(mapping.getIndex(), pkVal.toString()) .setSource(esFieldData); if (StringUtils.isNotEmpty(parentVal)) { indexRequest.setRouting(parentVal); @@ -85,13 +85,13 @@ public void insert(ESMapping mapping, Object pkVal, Map esFieldD } commitBulk(); } else { - ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.get_index()) + ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.getIndex()) .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal)) .size(10000); SearchResponse response = esSearchRequest.getResponse(); for (SearchHit hit : response.getHits()) { - 
ESUpdateRequest esUpdateRequest = this.esConnection.new ES7xUpdateRequest(mapping.get_index(), + ESUpdateRequest esUpdateRequest = this.esConnection.new ES7xUpdateRequest(mapping.getIndex(), hit.getId()).setDoc(esFieldData); getBulk().add(esUpdateRequest); commitBulk(); @@ -148,18 +148,18 @@ public void updateByQuery(ESSyncConfig config, Map paramsTmp, Ma @Override public void delete(ESMapping mapping, Object pkVal, Map esFieldData) { - if (mapping.get_id() != null) { - ESDeleteRequest esDeleteRequest = this.esConnection.new ES7xDeleteRequest(mapping.get_index(), + if (mapping.getId() != null) { + ESDeleteRequest esDeleteRequest = this.esConnection.new ES7xDeleteRequest(mapping.getIndex(), pkVal.toString()); getBulk().add(esDeleteRequest); commitBulk(); } else { - ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.get_index()) + ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.getIndex()) .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal)) .size(10000); SearchResponse response = esSearchRequest.getResponse(); for (SearchHit hit : response.getHits()) { - ESUpdateRequest esUpdateRequest = this.esConnection.new ES7xUpdateRequest(mapping.get_index(), + ESUpdateRequest esUpdateRequest = this.esConnection.new ES7xUpdateRequest(mapping.getIndex(), hit.getId()).setDoc(esFieldData); getBulk().add(esUpdateRequest); commitBulk(); @@ -204,7 +204,7 @@ public Object getValFromRS(ESMapping mapping, ResultSet resultSet, String fieldN public Object getESDataFromRS(ESMapping mapping, ResultSet resultSet, Map esFieldData) throws SQLException { SchemaItem schemaItem = mapping.getSchemaItem(); - String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id(); + String idFieldName = mapping.getId() == null ? 
mapping.getPk() : mapping.getId(); Object resultIdVal = null; for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { Object value = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName()); @@ -213,7 +213,7 @@ public Object getESDataFromRS(ESMapping mapping, ResultSet resultSet, resultIdVal = value; } - if (!fieldItem.getFieldName().equals(mapping.get_id()) + if (!fieldItem.getFieldName().equals(mapping.getId()) && !mapping.getSkips().contains(fieldItem.getFieldName())) { esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), value); } @@ -228,7 +228,7 @@ public Object getESDataFromRS(ESMapping mapping, ResultSet resultSet, @Override public Object getIdValFromRS(ESMapping mapping, ResultSet resultSet) throws SQLException { SchemaItem schemaItem = mapping.getSchemaItem(); - String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id(); + String idFieldName = mapping.getId() == null ? mapping.getPk() : mapping.getId(); Object resultIdVal = null; for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { Object value = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName()); @@ -245,7 +245,7 @@ public Object getIdValFromRS(ESMapping mapping, ResultSet resultSet) throws SQLE public Object getESDataFromRS(ESMapping mapping, ResultSet resultSet, Map dmlOld, Map esFieldData) throws SQLException { SchemaItem schemaItem = mapping.getSchemaItem(); - String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id(); + String idFieldName = mapping.getId() == null ? 
mapping.getPk() : mapping.getId(); Object resultIdVal = null; for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { if (fieldItem.getFieldName().equals(idFieldName)) { @@ -287,10 +287,9 @@ public Object getValFromData(ESMapping mapping, Map dmlData, Str } @Override - public Object getESDataFromDmlData(ESMapping mapping, Map dmlData, - Map esFieldData) { + public Object getESDataFromDmlData(ESMapping mapping, Map dmlData, Map esFieldData) { SchemaItem schemaItem = mapping.getSchemaItem(); - String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id(); + String idFieldName = mapping.getId() == null ? mapping.getPk() : mapping.getId(); Object resultIdVal = null; for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { String columnName = fieldItem.getColumnItems().iterator().next().getColumnName(); @@ -300,7 +299,7 @@ public Object getESDataFromDmlData(ESMapping mapping, Map dmlDat resultIdVal = value; } - if (!fieldItem.getFieldName().equals(mapping.get_id()) + if (!fieldItem.getFieldName().equals(mapping.getId()) && !mapping.getSkips().contains(fieldItem.getFieldName())) { esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), value); } @@ -312,13 +311,17 @@ public Object getESDataFromDmlData(ESMapping mapping, Map dmlDat } @Override - public Object getESDataFromDmlData(ESMapping mapping, Map dmlData, Map dmlOld, + public Object getESDataFromDmlData(ESMapping mapping,String owner, Map dmlData, Map dmlOld, Map esFieldData) { SchemaItem schemaItem = mapping.getSchemaItem(); - String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id(); + String idFieldName = mapping.getId() == null ? 
mapping.getPk() : mapping.getId(); Object resultIdVal = null; for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { - String columnName = fieldItem.getColumnItems().iterator().next().getColumnName(); + ColumnItem columnItem = fieldItem.getColumnItems().iterator().next(); + if (!columnItem.getOwner().equals(owner)) { + continue; + } + String columnName = columnItem.getColumnName(); if (fieldItem.getFieldName().equals(idFieldName)) { resultIdVal = getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName); @@ -345,17 +348,17 @@ private void commitBulk() { } private void append4Update(ESMapping mapping, Object pkVal, Map esFieldData) { - if (mapping.get_id() != null) { + if (mapping.getId() != null) { String parentVal = (String) esFieldData.remove("$parent_routing"); if (mapping.isUpsert()) { - ESUpdateRequest esUpdateRequest = this.esConnection.new ES7xUpdateRequest(mapping.get_index(), + ESUpdateRequest esUpdateRequest = this.esConnection.new ES7xUpdateRequest(mapping.getIndex(), pkVal.toString()).setDoc(esFieldData).setDocAsUpsert(true); if (StringUtils.isNotEmpty(parentVal)) { esUpdateRequest.setRouting(parentVal); } getBulk().add(esUpdateRequest); } else { - ESUpdateRequest esUpdateRequest = this.esConnection.new ES7xUpdateRequest(mapping.get_index(), + ESUpdateRequest esUpdateRequest = this.esConnection.new ES7xUpdateRequest(mapping.getIndex(), pkVal.toString()).setDoc(esFieldData); if (StringUtils.isNotEmpty(parentVal)) { esUpdateRequest.setRouting(parentVal); @@ -363,12 +366,12 @@ private void append4Update(ESMapping mapping, Object pkVal, Map getBulk().add(esUpdateRequest); } } else { - ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.get_index()) + ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.getIndex()) .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal)) .size(10000); SearchResponse response = esSearchRequest.getResponse(); for (SearchHit hit : 
response.getHits()) { - ESUpdateRequest esUpdateRequest = this.esConnection.new ES7xUpdateRequest(mapping.get_index(), + ESUpdateRequest esUpdateRequest = this.esConnection.new ES7xUpdateRequest(mapping.getIndex(), hit.getId()).setDoc(esFieldData); getBulk().add(esUpdateRequest); } @@ -384,15 +387,15 @@ private void append4Update(ESMapping mapping, Object pkVal, Map */ @SuppressWarnings("unchecked") private String getEsType(ESMapping mapping, String fieldName) { - String key = mapping.get_index() + "-" + mapping.get_type(); + String key = mapping.getIndex() + "-" + mapping.getType(); Map fieldType = esFieldTypes.get(key); if (fieldType != null) { return fieldType.get(fieldName); } else { - MappingMetaData mappingMetaData = esConnection.getMapping(mapping.get_index()); + MappingMetaData mappingMetaData = esConnection.getMapping(mapping.getIndex()); if (mappingMetaData == null) { - throw new IllegalArgumentException("Not found the mapping info of index: " + mapping.get_index()); + throw new IllegalArgumentException("Not found the mapping info of index: " + mapping.getIndex()); } fieldType = new LinkedHashMap<>(); diff --git a/client-adapter/es8x/pom.xml b/client-adapter/es8x/pom.xml new file mode 100644 index 0000000000..1e0aa054b7 --- /dev/null +++ b/client-adapter/es8x/pom.xml @@ -0,0 +1,93 @@ + + + + canal.client-adapter + com.alibaba.otter + 1.1.8-SNAPSHOT + ../pom.xml + + 4.0.0 + com.alibaba.otter + client-adapter.es8x + jar + canal client adapter es v8x module for otter ${project.version} + + + + com.alibaba.otter + client-adapter.common + ${project.version} + provided + + + com.alibaba.otter + client-adapter.escore + ${project.version} + + + co.elastic.clients + elasticsearch-java + 8.6.2 + + + + org.elasticsearch.client + elasticsearch-rest-high-level-client + 7.17.9 + + + + junit + junit + test + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.4 + + + jar-with-dependencies + + + + + make-assembly + package + + single + + + + + + 
maven-antrun-plugin + + + package + + run + + + + + + + + + + + + + + + + diff --git a/client-adapter/es8x/src/main/java/com/alibaba/otter/canal/client/adapter/es8x/ES8xAdapter.java b/client-adapter/es8x/src/main/java/com/alibaba/otter/canal/client/adapter/es8x/ES8xAdapter.java new file mode 100644 index 0000000000..904a44e458 --- /dev/null +++ b/client-adapter/es8x/src/main/java/com/alibaba/otter/canal/client/adapter/es8x/ES8xAdapter.java @@ -0,0 +1,117 @@ +package com.alibaba.otter.canal.client.adapter.es8x; + +import com.alibaba.otter.canal.client.adapter.es.core.ESAdapter; +import com.alibaba.otter.canal.client.adapter.es.core.config.ESSyncConfig; +import com.alibaba.otter.canal.client.adapter.es8x.etl.ESEtlService; +import com.alibaba.otter.canal.client.adapter.es8x.support.ES8xTemplate; +import com.alibaba.otter.canal.client.adapter.es8x.support.ESConnection; +import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig; +import com.alibaba.otter.canal.client.adapter.support.EtlResult; +import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig; +import com.alibaba.otter.canal.client.adapter.support.SPI; +import org.elasticsearch.action.search.SearchResponse; + +import javax.sql.DataSource; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +/** + * ES 8.x 外部适配器 + * + * @author ymz 2013-02-23 + * @version 1.0.0 + */ +@SPI("es8") +public class ES8xAdapter extends ESAdapter { + + private ESConnection esConnection; + + public ESConnection getEsConnection() { + return esConnection; + } + + @Override + public void init(OuterAdapterConfig configuration, Properties envProperties) { + try { + Map properties = configuration.getProperties(); + + String[] hostArray = configuration.getHosts().split(","); + esConnection = new ESConnection(hostArray, properties); + + this.esTemplate = new ES8xTemplate(esConnection); + + envProperties.put("es.version", "es8"); + super.init(configuration, 
envProperties); + } catch (Throwable e) { + throw new RuntimeException(e); + } + } + + @Override + public Map count(String task) { + ESSyncConfig config = esSyncConfig.get(task); + ESSyncConfig.ESMapping mapping = config.getEsMapping(); + SearchResponse response = this.esConnection.new ESSearchRequest(mapping.getIndex()).size(0).getResponse(); + + long rowCount = response.getHits().getTotalHits().value; + Map res = new LinkedHashMap<>(); + res.put("esIndex", mapping.getIndex()); + res.put("count", rowCount); + return res; + } + + @Override + public EtlResult etl(String task, List params) { + EtlResult etlResult = new EtlResult(); + ESSyncConfig config = esSyncConfig.get(task); + if (config != null) { + DataSource dataSource = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey()); + ESEtlService esEtlService = new ESEtlService(esConnection, config); + if (dataSource != null) { + return esEtlService.importData(params); + } else { + etlResult.setSucceeded(false); + etlResult.setErrorMessage("DataSource not found"); + return etlResult; + } + } else { + StringBuilder resultMsg = new StringBuilder(); + boolean resSuccess = true; + for (ESSyncConfig configTmp : esSyncConfig.values()) { + // 取所有的destination为task的配置 + if (configTmp.getDestination().equals(task)) { + ESEtlService esEtlService = new ESEtlService(esConnection, configTmp); + EtlResult etlRes = esEtlService.importData(params); + if (!etlRes.getSucceeded()) { + resSuccess = false; + resultMsg.append(etlRes.getErrorMessage()).append("\n"); + } else { + resultMsg.append(etlRes.getResultMessage()).append("\n"); + } + } + } + if (resultMsg.length() > 0) { + etlResult.setSucceeded(resSuccess); + if (resSuccess) { + etlResult.setResultMessage(resultMsg.toString()); + } else { + etlResult.setErrorMessage(resultMsg.toString()); + } + return etlResult; + } + } + etlResult.setSucceeded(false); + etlResult.setErrorMessage("Task not found"); + return etlResult; + } + + @Override + public void destroy() { + 
super.destroy(); + if (esConnection != null) { + esConnection.close(); + } + } +} diff --git a/client-adapter/es8x/src/main/java/com/alibaba/otter/canal/client/adapter/es8x/etl/ESEtlService.java b/client-adapter/es8x/src/main/java/com/alibaba/otter/canal/client/adapter/es8x/etl/ESEtlService.java new file mode 100644 index 0000000000..3ec64e9588 --- /dev/null +++ b/client-adapter/es8x/src/main/java/com/alibaba/otter/canal/client/adapter/es8x/etl/ESEtlService.java @@ -0,0 +1,202 @@ +package com.alibaba.otter.canal.client.adapter.es8x.etl; + +import java.sql.SQLException; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicLong; + +import javax.sql.DataSource; + +import org.apache.commons.lang.StringUtils; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchHit; + +import com.alibaba.otter.canal.client.adapter.es.core.config.ESSyncConfig; +import com.alibaba.otter.canal.client.adapter.es.core.config.ESSyncConfig.ESMapping; +import com.alibaba.otter.canal.client.adapter.es.core.config.SchemaItem.FieldItem; +import com.alibaba.otter.canal.client.adapter.es.core.support.ESBulkRequest; +import com.alibaba.otter.canal.client.adapter.es.core.support.ESBulkRequest.ESBulkResponse; +import com.alibaba.otter.canal.client.adapter.es.core.support.ESBulkRequest.ESIndexRequest; +import com.alibaba.otter.canal.client.adapter.es.core.support.ESBulkRequest.ESUpdateRequest; +import com.alibaba.otter.canal.client.adapter.es.core.support.ESTemplate; +import com.alibaba.otter.canal.client.adapter.es8x.support.ES8xTemplate; +import com.alibaba.otter.canal.client.adapter.es8x.support.ESConnection; +import com.alibaba.otter.canal.client.adapter.es8x.support.ESConnection.ESSearchRequest; +import com.alibaba.otter.canal.client.adapter.support.AbstractEtlService; +import 
com.alibaba.otter.canal.client.adapter.support.AdapterConfig; +import com.alibaba.otter.canal.client.adapter.support.EtlResult; +import com.alibaba.otter.canal.client.adapter.support.Util; + +/** + * ES ETL Service + * + * @author rewerma 2018-11-01 + * @version 1.0.0 + */ +public class ESEtlService extends AbstractEtlService { + + private ESConnection esConnection; + private ESTemplate esTemplate; + private ESSyncConfig config; + + public ESEtlService(ESConnection esConnection, ESSyncConfig config){ + super("ES", config); + this.esConnection = esConnection; + this.esTemplate = new ES8xTemplate(esConnection); + this.config = config; + } + + public EtlResult importData(List params) { + ESMapping mapping = config.getEsMapping(); + logger.info("start etl to import data to index: {}", mapping.getIndex()); + String sql = mapping.getSql(); + return importData(sql, params); + } + + protected boolean executeSqlImport(DataSource ds, String sql, List values, + AdapterConfig.AdapterMapping adapterMapping, AtomicLong impCount, + List errMsg) { + try { + ESMapping mapping = (ESMapping) adapterMapping; + Util.sqlRS(ds, sql, values, rs -> { + int count = 0; + try { + ESBulkRequest esBulkRequest = this.esConnection.new ES8xBulkRequest(); + + long batchBegin = System.currentTimeMillis(); + while (rs.next()) { + Map esFieldData = new LinkedHashMap<>(); + Object idVal = null; + for (FieldItem fieldItem : mapping.getSchemaItem().getSelectFields().values()) { + + String fieldName = fieldItem.getFieldName(); + if (mapping.getSkips().contains(fieldName)) { + continue; + } + + // 如果是主键字段则不插入 + if (fieldItem.getFieldName().equals(mapping.getId())) { + idVal = esTemplate.getValFromRS(mapping, rs, fieldName, fieldName); + } else { + Object val = esTemplate.getValFromRS(mapping, rs, fieldName, fieldName); + esFieldData.put(Util.cleanColumn(fieldName), val); + } + + } + + if (!mapping.getRelations().isEmpty()) { + mapping.getRelations().forEach((relationField, relationMapping) -> { + Map 
relations = new HashMap<>(); + relations.put("name", relationMapping.getName()); + if (StringUtils.isNotEmpty(relationMapping.getParent())) { + FieldItem parentFieldItem = mapping.getSchemaItem() + .getSelectFields() + .get(relationMapping.getParent()); + Object parentVal; + try { + parentVal = esTemplate.getValFromRS(mapping, + rs, + parentFieldItem.getFieldName(), + parentFieldItem.getFieldName()); + } catch (SQLException e) { + throw new RuntimeException(e); + } + if (parentVal != null) { + relations.put("parent", parentVal.toString()); + esFieldData.put("$parent_routing", parentVal.toString()); + + } + } + esFieldData.put(Util.cleanColumn(relationField), relations); + }); + } + + if (idVal != null) { + String parentVal = (String) esFieldData.remove("$parent_routing"); + if (mapping.isUpsert()) { + ESUpdateRequest esUpdateRequest = this.esConnection.new ES8xUpdateRequest( + mapping.getIndex(), + idVal.toString()).setDoc(esFieldData).setDocAsUpsert(true); + + if (StringUtils.isNotEmpty(parentVal)) { + esUpdateRequest.setRouting(parentVal); + } + + esBulkRequest.add(esUpdateRequest); + } else { + ESIndexRequest esIndexRequest = this.esConnection.new ES8xIndexRequest( + mapping.getIndex(), + idVal.toString()).setSource(esFieldData); + if (StringUtils.isNotEmpty(parentVal)) { + esIndexRequest.setRouting(parentVal); + } + esBulkRequest.add(esIndexRequest); + } + } else { + idVal = esFieldData.get(mapping.getPk()); + ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.getIndex()) + .setQuery(QueryBuilders.termQuery(mapping.getPk(), idVal)) + .size(10000); + SearchResponse response = esSearchRequest.getResponse(); + for (SearchHit hit : response.getHits()) { + ESUpdateRequest esUpdateRequest = this.esConnection.new ES8xUpdateRequest( + mapping.getIndex(), + hit.getId()).setDoc(esFieldData); + esBulkRequest.add(esUpdateRequest); + } + } + + if (esBulkRequest.numberOfActions() % mapping.getCommitBatch() == 0 + && 
esBulkRequest.numberOfActions() > 0) { + long esBatchBegin = System.currentTimeMillis(); + ESBulkResponse rp = esBulkRequest.bulk(); + if (rp.hasFailures()) { + rp.processFailBulkResponse("全量数据 etl 异常 "); + } + + if (logger.isTraceEnabled()) { + logger.trace("全量数据批量导入批次耗时: {}, es执行时间: {}, 批次大小: {}, index; {}", + (System.currentTimeMillis() - batchBegin), + (System.currentTimeMillis() - esBatchBegin), + esBulkRequest.numberOfActions(), + mapping.getIndex()); + } + batchBegin = System.currentTimeMillis(); + esBulkRequest.resetBulk(); + } + count++; + impCount.incrementAndGet(); + } + + if (esBulkRequest.numberOfActions() > 0) { + long esBatchBegin = System.currentTimeMillis(); + ESBulkResponse rp = esBulkRequest.bulk(); + if (rp.hasFailures()) { + rp.processFailBulkResponse("全量数据 etl 异常 "); + } + if (logger.isTraceEnabled()) { + logger.trace("全量数据批量导入最后批次耗时: {}, es执行时间: {}, 批次大小: {}, index; {}", + (System.currentTimeMillis() - batchBegin), + (System.currentTimeMillis() - esBatchBegin), + esBulkRequest.numberOfActions(), + mapping.getIndex()); + } + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + errMsg.add(mapping.getIndex() + " etl failed! 
==>" + e.getMessage()); + throw new RuntimeException(e); + } + return count; + }); + + return true; + } catch (Exception e) { + logger.error(e.getMessage(), e); + return false; + } + } +} diff --git a/client-adapter/es8x/src/main/java/com/alibaba/otter/canal/client/adapter/es8x/support/ES8xTemplate.java b/client-adapter/es8x/src/main/java/com/alibaba/otter/canal/client/adapter/es8x/support/ES8xTemplate.java new file mode 100644 index 0000000000..7f125b7ed7 --- /dev/null +++ b/client-adapter/es8x/src/main/java/com/alibaba/otter/canal/client/adapter/es8x/support/ES8xTemplate.java @@ -0,0 +1,466 @@ +package com.alibaba.otter.canal.client.adapter.es8x.support; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import javax.sql.DataSource; + +import org.apache.commons.lang.StringUtils; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.metadata.MappingMetadata; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchHit; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.alibaba.otter.canal.client.adapter.es.core.config.ESSyncConfig; +import com.alibaba.otter.canal.client.adapter.es.core.config.ESSyncConfig.ESMapping; +import com.alibaba.otter.canal.client.adapter.es.core.config.SchemaItem; +import com.alibaba.otter.canal.client.adapter.es.core.config.SchemaItem.ColumnItem; +import com.alibaba.otter.canal.client.adapter.es.core.config.SchemaItem.FieldItem; +import com.alibaba.otter.canal.client.adapter.es.core.support.ESBulkRequest; +import com.alibaba.otter.canal.client.adapter.es.core.support.ESBulkRequest.ESBulkResponse; +import com.alibaba.otter.canal.client.adapter.es.core.support.ESBulkRequest.ESDeleteRequest; +import 
com.alibaba.otter.canal.client.adapter.es.core.support.ESBulkRequest.ESIndexRequest; +import com.alibaba.otter.canal.client.adapter.es.core.support.ESBulkRequest.ESUpdateRequest; +import com.alibaba.otter.canal.client.adapter.es.core.support.ESSyncUtil; +import com.alibaba.otter.canal.client.adapter.es.core.support.ESTemplate; +import com.alibaba.otter.canal.client.adapter.es8x.support.ESConnection.ESSearchRequest; +import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig; +import com.alibaba.otter.canal.client.adapter.support.Util; + +public class ES8xTemplate implements ESTemplate { + + private static final Logger logger = LoggerFactory + .getLogger(ESTemplate.class); + + private static final int MAX_BATCH_SIZE = 1000; + + private ESConnection esConnection; + + private ESBulkRequest esBulkRequest; + + // es 字段类型本地缓存 + private static ConcurrentMap> esFieldTypes = new ConcurrentHashMap<>(); + + public ES8xTemplate(ESConnection esConnection){ + this.esConnection = esConnection; + this.esBulkRequest = this.esConnection.new ES8xBulkRequest(); + } + + public ESBulkRequest getBulk() { + return esBulkRequest; + } + + public void resetBulkRequestBuilder() { + this.esBulkRequest.resetBulk(); + } + + @Override + public void insert(ESMapping mapping, Object pkVal, Map esFieldData) { + if (mapping.getId() != null) { + String parentVal = (String) esFieldData.remove("$parent_routing"); + if (mapping.isUpsert()) { + ESUpdateRequest updateRequest = esConnection.new ES8xUpdateRequest(mapping.getIndex(), + pkVal.toString()).setDoc(esFieldData).setDocAsUpsert(true); + if (StringUtils.isNotEmpty(parentVal)) { + updateRequest.setRouting(parentVal); + } + getBulk().add(updateRequest); + } else { + ESIndexRequest indexRequest = esConnection.new ES8xIndexRequest(mapping.getIndex(), pkVal.toString()) + .setSource(esFieldData); + if (StringUtils.isNotEmpty(parentVal)) { + indexRequest.setRouting(parentVal); + } + getBulk().add(indexRequest); + } + commitBulk(); + } else { + 
ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.getIndex()) + .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal)) + .size(10000); + SearchResponse response = esSearchRequest.getResponse(); + + for (SearchHit hit : response.getHits()) { + ESUpdateRequest esUpdateRequest = this.esConnection.new ES8xUpdateRequest(mapping.getIndex(), + hit.getId()).setDoc(esFieldData); + getBulk().add(esUpdateRequest); + commitBulk(); + } + } + } + + @Override + public void update(ESMapping mapping, Object pkVal, Map esFieldData) { + Map esFieldDataTmp = new LinkedHashMap<>(esFieldData.size()); + esFieldData.forEach((k, v) -> esFieldDataTmp.put(Util.cleanColumn(k), v)); + append4Update(mapping, pkVal, esFieldDataTmp); + commitBulk(); + } + + @Override + public void updateByQuery(ESSyncConfig config, Map paramsTmp, Map esFieldData) { + if (paramsTmp.isEmpty()) { + return; + } + ESMapping mapping = config.getEsMapping(); + BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); + paramsTmp.forEach((fieldName, value) -> queryBuilder.must(QueryBuilders.termsQuery(fieldName, value))); + + // 查询sql批量更新 + DataSource ds = DatasourceConfig.DATA_SOURCES.get(config.getDataSourceKey()); + StringBuilder sql = new StringBuilder("SELECT * FROM (" + mapping.getSql() + ") _v WHERE "); + List values = new ArrayList<>(); + paramsTmp.forEach((fieldName, value) -> { + sql.append("_v.").append(fieldName).append("=? 
AND "); + values.add(value); + }); + // TODO 直接外部包裹sql会导致全表扫描性能低, 待优化拼接内部where条件 + int len = sql.length(); + sql.delete(len - 4, len); + Integer syncCount = (Integer) Util.sqlRS(ds, sql.toString(), values, rs -> { + int count = 0; + try { + while (rs.next()) { + Object idVal = getIdValFromRS(mapping, rs); + append4Update(mapping, idVal, esFieldData); + commitBulk(); + count++; + } + } catch (Exception e) { + throw new RuntimeException(e); + } + return count; + }); + if (logger.isTraceEnabled()) { + logger.trace("Update ES by query affected {} records", syncCount); + } + } + + @Override + public void delete(ESMapping mapping, Object pkVal, Map esFieldData) { + if (mapping.getId() != null) { + ESDeleteRequest esDeleteRequest = this.esConnection.new ES8xDeleteRequest(mapping.getIndex(), + pkVal.toString()); + getBulk().add(esDeleteRequest); + commitBulk(); + } else { + ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.getIndex()) + .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal)) + .size(10000); + SearchResponse response = esSearchRequest.getResponse(); + for (SearchHit hit : response.getHits()) { + ESUpdateRequest esUpdateRequest = this.esConnection.new ES8xUpdateRequest(mapping.getIndex(), + hit.getId()).setDoc(esFieldData); + getBulk().add(esUpdateRequest); + commitBulk(); + } + } + } + + @Override + public void commit() { + if (getBulk().numberOfActions() > 0) { + ESBulkResponse response = getBulk().bulk(); + if (response.hasFailures()) { + response.processFailBulkResponse("ES sync commit error "); + } + resetBulkRequestBuilder(); + } + } + + @Override + public Object getValFromRS(ESMapping mapping, ResultSet resultSet, String fieldName, + String columnName) throws SQLException { + fieldName = Util.cleanColumn(fieldName); + columnName = Util.cleanColumn(columnName); + String esType = getEsType(mapping, fieldName); + + Object value = resultSet.getObject(columnName); + if (value instanceof Boolean) { + if 
(!"boolean".equals(esType)) { + value = resultSet.getByte(columnName); + } + } + + // 如果是对象类型 + if (mapping.getObjFields().containsKey(fieldName)) { + return ESSyncUtil.convertToEsObj(value, mapping.getObjFields().get(fieldName)); + } else { + return ESSyncUtil.typeConvert(value, esType); + } + } + + @Override + public Object getESDataFromRS(ESMapping mapping, ResultSet resultSet, + Map esFieldData) throws SQLException { + SchemaItem schemaItem = mapping.getSchemaItem(); + String idFieldName = mapping.getId() == null ? mapping.getPk() : mapping.getId(); + Object resultIdVal = null; + for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { + Object value = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName()); + + if (fieldItem.getFieldName().equals(idFieldName)) { + resultIdVal = value; + } + + if (!fieldItem.getFieldName().equals(mapping.getId()) + && !mapping.getSkips().contains(fieldItem.getFieldName())) { + esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), value); + } + } + + // 添加父子文档关联信息 + putRelationDataFromRS(mapping, schemaItem, resultSet, esFieldData); + + return resultIdVal; + } + + @Override + public Object getIdValFromRS(ESMapping mapping, ResultSet resultSet) throws SQLException { + SchemaItem schemaItem = mapping.getSchemaItem(); + String idFieldName = mapping.getId() == null ? mapping.getPk() : mapping.getId(); + Object resultIdVal = null; + for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { + Object value = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName()); + + if (fieldItem.getFieldName().equals(idFieldName)) { + resultIdVal = value; + break; + } + } + return resultIdVal; + } + + @Override + public Object getESDataFromRS(ESMapping mapping, ResultSet resultSet, Map dmlOld, + Map esFieldData) throws SQLException { + SchemaItem schemaItem = mapping.getSchemaItem(); + String idFieldName = mapping.getId() == null ? 
mapping.getPk() : mapping.getId(); + Object resultIdVal = null; + for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { + if (fieldItem.getFieldName().equals(idFieldName)) { + resultIdVal = getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName()); + } + + for (ColumnItem columnItem : fieldItem.getColumnItems()) { + if (dmlOld.containsKey(columnItem.getColumnName()) + && !mapping.getSkips().contains(fieldItem.getFieldName())) { + esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), + getValFromRS(mapping, resultSet, fieldItem.getFieldName(), fieldItem.getFieldName())); + break; + } + } + } + + // 添加父子文档关联信息 + putRelationDataFromRS(mapping, schemaItem, resultSet, esFieldData); + + return resultIdVal; + } + + @Override + public Object getValFromData(ESMapping mapping, Map dmlData, String fieldName, String columnName) { + String esType = getEsType(mapping, fieldName); + Object value = dmlData.get(columnName); + if (value instanceof Byte) { + if ("boolean".equals(esType)) { + value = ((Byte) value).intValue() != 0; + } + } + + // 如果是对象类型 + if (mapping.getObjFields().containsKey(fieldName)) { + return ESSyncUtil.convertToEsObj(value, mapping.getObjFields().get(fieldName)); + } else { + return ESSyncUtil.typeConvert(value, esType); + } + } + + @Override + public Object getESDataFromDmlData(ESMapping mapping, Map dmlData, + Map esFieldData) { + SchemaItem schemaItem = mapping.getSchemaItem(); + String idFieldName = mapping.getId() == null ? 
mapping.getPk() : mapping.getId(); + Object resultIdVal = null; + for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { + String columnName = fieldItem.getColumnItems().iterator().next().getColumnName(); + Object value = getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName); + + if (fieldItem.getFieldName().equals(idFieldName)) { + resultIdVal = value; + } + + if (!fieldItem.getFieldName().equals(mapping.getId()) + && !mapping.getSkips().contains(fieldItem.getFieldName())) { + esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), value); + } + } + + // 添加父子文档关联信息 + putRelationData(mapping, schemaItem, dmlData, esFieldData); + return resultIdVal; + } + + @Override + public Object getESDataFromDmlData(ESMapping mapping, String owner, Map dmlData, + Map dmlOld, Map esFieldData) { + SchemaItem schemaItem = mapping.getSchemaItem(); + String idFieldName = mapping.getId() == null ? mapping.getPk() : mapping.getId(); + Object resultIdVal = null; + for (FieldItem fieldItem : schemaItem.getSelectFields().values()) { + ColumnItem columnItem = fieldItem.getColumnItems().iterator().next(); + if (!columnItem.getOwner().equals(owner)) { + continue; + } + String columnName = columnItem.getColumnName(); + + if (fieldItem.getFieldName().equals(idFieldName)) { + resultIdVal = getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName); + } + + if (dmlOld.containsKey(columnName) && !mapping.getSkips().contains(fieldItem.getFieldName())) { + esFieldData.put(Util.cleanColumn(fieldItem.getFieldName()), + getValFromData(mapping, dmlData, fieldItem.getFieldName(), columnName)); + } + } + + // 添加父子文档关联信息 + putRelationData(mapping, schemaItem, dmlOld, esFieldData); + return resultIdVal; + } + + /** + * 如果大于批量数则提交批次 + */ + private void commitBulk() { + if (getBulk().numberOfActions() >= MAX_BATCH_SIZE) { + commit(); + } + } + + private void append4Update(ESMapping mapping, Object pkVal, Map esFieldData) { + if (mapping.getId() != null) { + 
String parentVal = (String) esFieldData.remove("$parent_routing"); + if (mapping.isUpsert()) { + ESUpdateRequest esUpdateRequest = this.esConnection.new ES8xUpdateRequest(mapping.getIndex(), + pkVal.toString()).setDoc(esFieldData).setDocAsUpsert(true); + if (StringUtils.isNotEmpty(parentVal)) { + esUpdateRequest.setRouting(parentVal); + } + getBulk().add(esUpdateRequest); + } else { + ESUpdateRequest esUpdateRequest = this.esConnection.new ES8xUpdateRequest(mapping.getIndex(), + pkVal.toString()).setDoc(esFieldData); + if (StringUtils.isNotEmpty(parentVal)) { + esUpdateRequest.setRouting(parentVal); + } + getBulk().add(esUpdateRequest); + } + } else { + ESSearchRequest esSearchRequest = this.esConnection.new ESSearchRequest(mapping.getIndex()) + .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal)) + .size(10000); + SearchResponse response = esSearchRequest.getResponse(); + for (SearchHit hit : response.getHits()) { + ESUpdateRequest esUpdateRequest = this.esConnection.new ES8xUpdateRequest(mapping.getIndex(), + hit.getId()).setDoc(esFieldData); + getBulk().add(esUpdateRequest); + } + } + } + + /** + * 获取es mapping中的属性类型 + * + * @param mapping mapping配置 + * @param fieldName 属性名 + * @return 类型 + */ + @SuppressWarnings("unchecked") + private String getEsType(ESMapping mapping, String fieldName) { + String key = mapping.getIndex() + "-" + mapping.getType(); + Map fieldType = esFieldTypes.get(key); + if (fieldType != null) { + return fieldType.get(fieldName); + } else { + MappingMetadata mappingMetaData = esConnection.getMapping(mapping.getIndex()); + + if (mappingMetaData == null) { + throw new IllegalArgumentException("Not found the mapping info of index: " + mapping.getIndex()); + } + + fieldType = new LinkedHashMap<>(); + + Map sourceMap = mappingMetaData.getSourceAsMap(); + Map esMapping = (Map) sourceMap.get("properties"); + for (Map.Entry entry : esMapping.entrySet()) { + Map value = (Map) entry.getValue(); + if (value.containsKey("properties")) { + 
fieldType.put(entry.getKey(), "object"); + } else { + fieldType.put(entry.getKey(), (String) value.get("type")); + } + } + esFieldTypes.put(key, fieldType); + + return fieldType.get(fieldName); + } + } + + private void putRelationDataFromRS(ESMapping mapping, SchemaItem schemaItem, ResultSet resultSet, + Map esFieldData) { + // 添加父子文档关联信息 + if (!mapping.getRelations().isEmpty()) { + mapping.getRelations().forEach((relationField, relationMapping) -> { + Map relations = new HashMap<>(); + relations.put("name", relationMapping.getName()); + if (StringUtils.isNotEmpty(relationMapping.getParent())) { + FieldItem parentFieldItem = schemaItem.getSelectFields().get(relationMapping.getParent()); + Object parentVal; + try { + parentVal = getValFromRS(mapping, + resultSet, + parentFieldItem.getFieldName(), + parentFieldItem.getFieldName()); + } catch (SQLException e) { + throw new RuntimeException(e); + } + if (parentVal != null) { + relations.put("parent", parentVal.toString()); + esFieldData.put("$parent_routing", parentVal.toString()); + + } + } + esFieldData.put(relationField, relations); + }); + } + } + + private void putRelationData(ESMapping mapping, SchemaItem schemaItem, Map dmlData, + Map esFieldData) { + // 添加父子文档关联信息 + if (!mapping.getRelations().isEmpty()) { + mapping.getRelations().forEach((relationField, relationMapping) -> { + Map relations = new HashMap<>(); + relations.put("name", relationMapping.getName()); + if (StringUtils.isNotEmpty(relationMapping.getParent())) { + FieldItem parentFieldItem = schemaItem.getSelectFields().get(relationMapping.getParent()); + String columnName = parentFieldItem.getColumnItems().iterator().next().getColumnName(); + Object parentVal = getValFromData(mapping, dmlData, parentFieldItem.getFieldName(), columnName); + if (parentVal != null) { + relations.put("parent", parentVal.toString()); + esFieldData.put("$parent_routing", parentVal.toString()); + + } + } + esFieldData.put(relationField, relations); + }); + } + } +} diff 
--git a/client-adapter/es8x/src/main/java/com/alibaba/otter/canal/client/adapter/es8x/support/ESConnection.java b/client-adapter/es8x/src/main/java/com/alibaba/otter/canal/client/adapter/es8x/support/ESConnection.java new file mode 100644 index 0000000000..429542d7a3 --- /dev/null +++ b/client-adapter/es8x/src/main/java/com/alibaba/otter/canal/client/adapter/es8x/support/ESConnection.java @@ -0,0 +1,399 @@ +package com.alibaba.otter.canal.client.adapter.es8x.support; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.UnknownHostException; +import java.util.Arrays; +import java.util.Map; + +import org.apache.commons.lang.StringUtils; +import org.apache.http.HttpHost; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.delete.DeleteRequestBuilder; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.action.update.UpdateRequestBuilder; +import org.elasticsearch.client.*; +import org.elasticsearch.client.indices.GetMappingsRequest; +import org.elasticsearch.client.indices.GetMappingsResponse; +import org.elasticsearch.cluster.metadata.MappingMetadata; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.rest.RestStatus; 
+import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.alibaba.otter.canal.client.adapter.es.core.support.ESBulkRequest; + +/** + * ES 连接器, 只支持 Rest 方式 + * + * @author ymz 2023-03-02 + * @version 1.0.0 + */ +public class ESConnection { + + private static final Logger logger = LoggerFactory.getLogger(ESConnection.class); + + private RestHighLevelClient restHighLevelClient; + + public ESConnection(String[] hosts, Map properties) throws UnknownHostException{ + HttpHost[] httpHosts = Arrays.stream(hosts).map(this::createHttpHost).toArray(HttpHost[]::new); + RestClientBuilder restClientBuilder = RestClient.builder(httpHosts); + String nameAndPwd = properties.get("security.auth"); + if (StringUtils.isNotEmpty(nameAndPwd) && nameAndPwd.contains(":")) { + String[] nameAndPwdArr = nameAndPwd.split(":"); + final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, + new UsernamePasswordCredentials(nameAndPwdArr[0], nameAndPwdArr[1])); + restClientBuilder.setHttpClientConfigCallback( + httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider)); + } + restHighLevelClient = new RestHighLevelClientBuilder(restClientBuilder.build()).setApiCompatibilityMode(true) + .build(); + } + + public void close() { + try { + restHighLevelClient.close(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public MappingMetadata getMapping(String index) { + MappingMetadata mappingMetaData = null; + + Map mappings; + try { + GetMappingsRequest request = new GetMappingsRequest(); + request.indices(index); + GetMappingsResponse response = restHighLevelClient.indices().getMapping(request, RequestOptions.DEFAULT); + + mappings = response.mappings(); + } catch (NullPointerException e) { + throw new IllegalArgumentException("Not found the mapping info of index: " + index); + } catch (IOException e) 
{ + logger.error(e.getMessage(), e); + return null; + } + mappingMetaData = mappings.get(index); + + return mappingMetaData; + } + + public class ES8xIndexRequest implements ESBulkRequest.ESIndexRequest { + + private IndexRequestBuilder indexRequestBuilder; + + private IndexRequest indexRequest; + + public ES8xIndexRequest(String index, String id){ + indexRequest = new IndexRequest(index); + indexRequest.id(id); + + } + + public ES8xIndexRequest setSource(Map source) { + + indexRequest.source(source); + + return this; + } + + public ES8xIndexRequest setRouting(String routing) { + + indexRequest.routing(routing); + + return this; + } + + public IndexRequestBuilder getIndexRequestBuilder() { + return indexRequestBuilder; + } + + public void setIndexRequestBuilder(IndexRequestBuilder indexRequestBuilder) { + this.indexRequestBuilder = indexRequestBuilder; + } + + public IndexRequest getIndexRequest() { + return indexRequest; + } + + public void setIndexRequest(IndexRequest indexRequest) { + this.indexRequest = indexRequest; + } + } + + public class ES8xUpdateRequest implements ESBulkRequest.ESUpdateRequest { + + private UpdateRequestBuilder updateRequestBuilder; + + private UpdateRequest updateRequest; + + public ES8xUpdateRequest(String index, String id){ + + updateRequest = new UpdateRequest(index, id); + } + + public ES8xUpdateRequest setDoc(Map source) { + + updateRequest.doc(source); + + return this; + } + + public ES8xUpdateRequest setDocAsUpsert(boolean shouldUpsertDoc) { + + updateRequest.docAsUpsert(shouldUpsertDoc); + + return this; + } + + public ES8xUpdateRequest setRouting(String routing) { + + updateRequest.routing(routing); + + return this; + } + + public UpdateRequestBuilder getUpdateRequestBuilder() { + return updateRequestBuilder; + } + + public void setUpdateRequestBuilder(UpdateRequestBuilder updateRequestBuilder) { + this.updateRequestBuilder = updateRequestBuilder; + } + + public UpdateRequest getUpdateRequest() { + return updateRequest; + } + + 
public void setUpdateRequest(UpdateRequest updateRequest) { + this.updateRequest = updateRequest; + } + } + + public class ES8xDeleteRequest implements ESBulkRequest.ESDeleteRequest { + + private DeleteRequestBuilder deleteRequestBuilder; + + private DeleteRequest deleteRequest; + + public ES8xDeleteRequest(String index, String id){ + + deleteRequest = new DeleteRequest(index, id); + + } + + public DeleteRequestBuilder getDeleteRequestBuilder() { + return deleteRequestBuilder; + } + + public void setDeleteRequestBuilder(DeleteRequestBuilder deleteRequestBuilder) { + this.deleteRequestBuilder = deleteRequestBuilder; + } + + public DeleteRequest getDeleteRequest() { + return deleteRequest; + } + + public void setDeleteRequest(DeleteRequest deleteRequest) { + this.deleteRequest = deleteRequest; + } + } + + public class ESSearchRequest { + + private SearchRequestBuilder searchRequestBuilder; + + private SearchRequest searchRequest; + + private SearchSourceBuilder sourceBuilder; + + public ESSearchRequest(String index){ + + searchRequest = new SearchRequest(index); + sourceBuilder = new SearchSourceBuilder(); + + } + + public ESSearchRequest setQuery(QueryBuilder queryBuilder) { + + sourceBuilder.query(queryBuilder); + + return this; + } + + public ESSearchRequest size(int size) { + + sourceBuilder.size(size); + + return this; + } + + public SearchResponse getResponse() { + + searchRequest.source(sourceBuilder); + try { + return restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT); + } catch (IOException e) { + throw new RuntimeException(e); + } + + } + + public SearchRequestBuilder getSearchRequestBuilder() { + return searchRequestBuilder; + } + + public void setSearchRequestBuilder(SearchRequestBuilder searchRequestBuilder) { + this.searchRequestBuilder = searchRequestBuilder; + } + + public SearchRequest getSearchRequest() { + return searchRequest; + } + + public void setSearchRequest(SearchRequest searchRequest) { + this.searchRequest = searchRequest; + 
} + } + + public class ES8xBulkRequest implements ESBulkRequest { + + private BulkRequestBuilder bulkRequestBuilder; + + private BulkRequest bulkRequest; + + public ES8xBulkRequest(){ + + bulkRequest = new BulkRequest(); + + } + + public void resetBulk() { + + bulkRequest = new BulkRequest(); + + } + + public ES8xBulkRequest add(ESIndexRequest esIndexRequest) { + ES8xIndexRequest eir = (ES8xIndexRequest) esIndexRequest; + + bulkRequest.add(eir.indexRequest); + + return this; + } + + public ES8xBulkRequest add(ESUpdateRequest esUpdateRequest) { + ES8xUpdateRequest eur = (ES8xUpdateRequest) esUpdateRequest; + + bulkRequest.add(eur.updateRequest); + + return this; + } + + public ES8xBulkRequest add(ESDeleteRequest esDeleteRequest) { + ES8xDeleteRequest edr = (ES8xDeleteRequest) esDeleteRequest; + + bulkRequest.add(edr.deleteRequest); + + return this; + } + + public int numberOfActions() { + return bulkRequest.numberOfActions(); + } + + public ESBulkResponse bulk() { + try { + BulkResponse responses = restHighLevelClient.bulk(bulkRequest, RequestOptions.DEFAULT); + return new ES8xBulkResponse(responses); + } catch (IOException e) { + throw new RuntimeException(e); + } + + } + + public BulkRequestBuilder getBulkRequestBuilder() { + return bulkRequestBuilder; + } + + public void setBulkRequestBuilder(BulkRequestBuilder bulkRequestBuilder) { + this.bulkRequestBuilder = bulkRequestBuilder; + } + + public BulkRequest getBulkRequest() { + return bulkRequest; + } + + public void setBulkRequest(BulkRequest bulkRequest) { + this.bulkRequest = bulkRequest; + } + } + + public static class ES8xBulkResponse implements ESBulkRequest.ESBulkResponse { + + private BulkResponse bulkResponse; + + public ES8xBulkResponse(BulkResponse bulkResponse){ + this.bulkResponse = bulkResponse; + } + + @Override + public boolean hasFailures() { + return bulkResponse.hasFailures(); + } + + @Override + public void processFailBulkResponse(String errorMsg) { + for (BulkItemResponse itemResponse : 
bulkResponse.getItems()) { + if (!itemResponse.isFailed()) { + continue; + } + + if (itemResponse.getFailure().getStatus() == RestStatus.NOT_FOUND) { + logger.error(itemResponse.getFailureMessage()); + } else { + throw new RuntimeException(errorMsg + itemResponse.getFailureMessage()); + } + } + } + } + + public RestHighLevelClient getRestHighLevelClient() { + return restHighLevelClient; + } + + public void setRestHighLevelClient(RestHighLevelClient restHighLevelClient) { + this.restHighLevelClient = restHighLevelClient; + } + + private HttpHost createHttpHost(String uriStr) { + URI uri = URI.create(uriStr); + if (!org.springframework.util.StringUtils.hasLength(uri.getUserInfo())) { + return HttpHost.create(uri.toString()); + } + try { + return HttpHost.create(new URI(uri + .getScheme(), null, uri.getHost(), uri.getPort(), uri.getPath(), uri.getQuery(), uri.getFragment()) + .toString()); + } catch (URISyntaxException ex) { + throw new IllegalStateException(ex); + } + } +} diff --git a/client-adapter/es8x/src/main/resources/META-INF/canal/com.alibaba.otter.canal.client.adapter.OuterAdapter b/client-adapter/es8x/src/main/resources/META-INF/canal/com.alibaba.otter.canal.client.adapter.OuterAdapter new file mode 100644 index 0000000000..64078a9e55 --- /dev/null +++ b/client-adapter/es8x/src/main/resources/META-INF/canal/com.alibaba.otter.canal.client.adapter.OuterAdapter @@ -0,0 +1 @@ +es8=com.alibaba.otter.canal.client.adapter.es8x.ES8xAdapter diff --git a/client-adapter/es8x/src/main/resources/es8/biz_order.yml b/client-adapter/es8x/src/main/resources/es8/biz_order.yml new file mode 100644 index 0000000000..86a225830b --- /dev/null +++ b/client-adapter/es8x/src/main/resources/es8/biz_order.yml @@ -0,0 +1,20 @@ +dataSourceKey: defaultDS +destination: example +groupId: g1 +esMapping: + _index: customer + _id: _id + relations: + customer_order: + name: order + parent: customer_id + sql: "select concat('oid_', t.id) as _id, + t.customer_id, + t.id as order_id, + 
t.serial_code as order_serial, + t.c_time as order_time + from biz_order t" + skips: + - customer_id + etlCondition: "where t.c_time>={}" + commitBatch: 3000 diff --git a/client-adapter/es8x/src/main/resources/es8/customer.yml b/client-adapter/es8x/src/main/resources/es8/customer.yml new file mode 100644 index 0000000000..4010dcffc5 --- /dev/null +++ b/client-adapter/es8x/src/main/resources/es8/customer.yml @@ -0,0 +1,46 @@ +dataSourceKey: defaultDS +destination: example +groupId: g1 +esMapping: + _index: customer + _id: id + relations: + customer_order: + name: customer + sql: "select t.id, t.name, t.email from customer t" + etlCondition: "where t.c_time>={}" + commitBatch: 3000 + + +#{ +# "mappings":{ +# "_doc":{ +# "properties":{ +# "id": { +# "type": "long" +# }, +# "name": { +# "type": "text" +# }, +# "email": { +# "type": "text" +# }, +# "order_id": { +# "type": "long" +# }, +# "order_serial": { +# "type": "text" +# }, +# "order_time": { +# "type": "date" +# }, +# "customer_order":{ +# "type":"join", +# "relations":{ +# "customer":"order" +# } +# } +# } +# } +# } +#} diff --git a/client-adapter/es8x/src/main/resources/es8/mytest_user.yml b/client-adapter/es8x/src/main/resources/es8/mytest_user.yml new file mode 100644 index 0000000000..84d6eaedf8 --- /dev/null +++ b/client-adapter/es8x/src/main/resources/es8/mytest_user.yml @@ -0,0 +1,15 @@ +dataSourceKey: defaultDS +destination: example +groupId: g1 +esMapping: + _index: mytest_user + _id: _id + # upsert: true + # pk: id + sql: "select a.id as _id, a.name, a.role_id, b.role_name, + a.c_time from user a + left join role b on b.id=a.role_id" + # objFields: + # _labels: array:; + etlCondition: "where a.c_time>={}" + commitBatch: 3000 diff --git a/client-adapter/es8x/src/test/java/com/alibaba/otter/canal/client/adapter/es8x/test/ESConnectionTest.java b/client-adapter/es8x/src/test/java/com/alibaba/otter/canal/client/adapter/es8x/test/ESConnectionTest.java new file mode 100644 index 0000000000..d5468446df --- 
/dev/null +++ b/client-adapter/es8x/src/test/java/com/alibaba/otter/canal/client/adapter/es8x/test/ESConnectionTest.java @@ -0,0 +1,44 @@ +package com.alibaba.otter.canal.client.adapter.es8x.test; + +import com.alibaba.otter.canal.client.adapter.es8x.support.ESConnection; +import org.elasticsearch.cluster.metadata.MappingMetadata; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Test; +import org.springframework.util.Assert; + +import java.net.UnknownHostException; +import java.util.HashMap; +import java.util.Map; + +@Ignore +public class ESConnectionTest { + + ESConnection esConnection; + + @Before + public void init() throws UnknownHostException { + String[] hosts = new String[]{"127.0.0.1:9200"}; + Map properties = new HashMap<>(); + properties.put("cluster.name", "elasticsearch"); + esConnection = new ESConnection(hosts, properties); + } + + @Test + public void test01() { + MappingMetadata mappingMetaData = esConnection.getMapping("mytest_user"); + + Map sourceMap = mappingMetaData.getSourceAsMap(); + Map esMapping = (Map) sourceMap.get("properties"); + for (Map.Entry entry : esMapping.entrySet()) { + Map value = (Map) entry.getValue(); + if (value.containsKey("properties")) { + System.out.println(entry.getKey() + " object"); + } else { + System.out.println(entry.getKey() + " " + value.get("type")); + Assert.notNull(entry.getKey(), "null column name"); + Assert.notNull(value.get("type"), "null column type"); + } + } + } +} diff --git a/client-adapter/es8x/src/test/java/com/alibaba/otter/canal/client/adapter/es8x/test/TestConstant.java b/client-adapter/es8x/src/test/java/com/alibaba/otter/canal/client/adapter/es8x/test/TestConstant.java new file mode 100644 index 0000000000..727371a35c --- /dev/null +++ b/client-adapter/es8x/src/test/java/com/alibaba/otter/canal/client/adapter/es8x/test/TestConstant.java @@ -0,0 +1,40 @@ +package com.alibaba.otter.canal.client.adapter.es8x.test; + +import com.alibaba.druid.pool.DruidDataSource; + +import 
java.sql.SQLException; + +public class TestConstant { + + public final static String jdbcUrl = "jdbc:mysql://127.0.0.1:3306/mytest?useUnicode=true"; + public final static String jdbcUser = "root"; + public final static String jdbcPassword = "121212"; + + public final static String esHosts = "127.0.0.1:9300"; + public final static String clusterName = "elasticsearch"; + + public final static DruidDataSource dataSource; + + static { + dataSource = new DruidDataSource(); + dataSource.setDriverClassName("com.mysql.jdbc.Driver"); + dataSource.setUrl(jdbcUrl); + dataSource.setUsername(jdbcUser); + dataSource.setPassword(jdbcPassword); + dataSource.setInitialSize(1); + dataSource.setMinIdle(1); + dataSource.setMaxActive(1); + dataSource.setMaxWait(60000); + dataSource.setTimeBetweenEvictionRunsMillis(60000); + dataSource.setMinEvictableIdleTimeMillis(300000); + dataSource.setPoolPreparedStatements(false); + dataSource.setMaxPoolPreparedStatementPerConnectionSize(20); + dataSource.setValidationQuery("select 1"); + try { + dataSource.init(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + +} diff --git a/client-adapter/escore/pom.xml b/client-adapter/escore/pom.xml index 0b91bb781e..9ede9649e1 100644 --- a/client-adapter/escore/pom.xml +++ b/client-adapter/escore/pom.xml @@ -3,7 +3,7 @@ canal.client-adapter com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml 4.0.0 @@ -22,6 +22,7 @@ com.alibaba druid + provided diff --git a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/ESAdapter.java b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/ESAdapter.java index 56458fc084..f9f7844e91 100644 --- a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/ESAdapter.java +++ b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/ESAdapter.java @@ -18,10 +18,7 @@ import 
com.alibaba.otter.canal.client.adapter.es.core.monitor.ESConfigMonitor; import com.alibaba.otter.canal.client.adapter.es.core.service.ESSyncService; import com.alibaba.otter.canal.client.adapter.es.core.support.ESTemplate; -import com.alibaba.otter.canal.client.adapter.support.DatasourceConfig; -import com.alibaba.otter.canal.client.adapter.support.Dml; -import com.alibaba.otter.canal.client.adapter.support.EtlResult; -import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig; +import com.alibaba.otter.canal.client.adapter.support.*; /** * ES外部适配器 @@ -42,6 +39,8 @@ public abstract class ESAdapter implements OuterAdapter { protected Properties envProperties; + protected OuterAdapterConfig configuration; + public ESSyncService getEsSyncService() { return esSyncService; } @@ -58,23 +57,13 @@ public Map> getDbTableEsSyncConfig() { public void init(OuterAdapterConfig configuration, Properties envProperties) { try { this.envProperties = envProperties; + this.configuration = configuration; Map esSyncConfigTmp = ESSyncConfigLoader.load(envProperties); // 过滤不匹配的key的配置 esSyncConfigTmp.forEach((key, config) -> { - if ((config.getOuterAdapterKey() == null && configuration.getKey() == null) - || (config.getOuterAdapterKey() != null && config.getOuterAdapterKey() - .equalsIgnoreCase(configuration.getKey()))) { - esSyncConfig.put(key, config); - } + addConfig(key, config); }); - for (Map.Entry entry : esSyncConfig.entrySet()) { - String configName = entry.getKey(); - ESSyncConfig config = entry.getValue(); - - addSyncConfigToCache(configName, config); - } - esSyncService = new ESSyncService(esTemplate); esConfigMonitor = new ESConfigMonitor(); @@ -103,12 +92,12 @@ private void sync(Dml dml) { String table = dml.getTable(); Map configMap; if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { - configMap = dbTableEsSyncConfig.get(StringUtils.trimToEmpty(dml.getDestination()) + "-" - + 
StringUtils.trimToEmpty(dml.getGroupId()) + "_" + database + "-" - + table); + configMap = dbTableEsSyncConfig + .get(StringUtils.trimToEmpty(dml.getDestination()) + "-" + StringUtils.trimToEmpty(dml.getGroupId()) + + "_" + database + "-" + table); } else { - configMap = dbTableEsSyncConfig.get(StringUtils.trimToEmpty(dml.getDestination()) + "_" + database + "-" - + table); + configMap = dbTableEsSyncConfig + .get(StringUtils.trimToEmpty(dml.getDestination()) + "_" + database + "-" + table); } if (configMap != null && !configMap.values().isEmpty()) { @@ -138,7 +127,7 @@ public String getDestination(String task) { return null; } - public void addSyncConfigToCache(String configName, ESSyncConfig config) { + private void addSyncConfigToCache(String configName, ESSyncConfig config) { Properties envProperties = this.envProperties; SchemaItem schemaItem = SqlParser.parse(config.getEsMapping().getSql()); config.getEsMapping().setSchemaItem(schemaItem); @@ -153,30 +142,60 @@ public void addSyncConfigToCache(String configName, ESSyncConfig config) { throw new RuntimeException("Not found the schema of jdbc-url: " + config.getDataSourceKey()); } String schema = matcher.group(2); - - schemaItem.getAliasTableItems() - .values() - .forEach(tableItem -> { - Map esSyncConfigMap; - if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { - esSyncConfigMap = dbTableEsSyncConfig.computeIfAbsent(StringUtils.trimToEmpty(config.getDestination()) - + "-" - + StringUtils.trimToEmpty(config.getGroupId()) - + "_" - + schema - + "-" - + tableItem.getTableName(), + schemaItem.getAliasTableItems().values().forEach(tableItem -> { + Map esSyncConfigMap; + String schemaKey = tableItem.getSchema() == null ? 
schema : tableItem.getSchema(); + if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { + esSyncConfigMap = dbTableEsSyncConfig + .computeIfAbsent(StringUtils.trimToEmpty(config.getDestination()) + "-" + + StringUtils.trimToEmpty(config.getGroupId()) + "_" + schemaKey + "-" + + tableItem.getTableName(), k -> new ConcurrentHashMap<>()); - } else { - esSyncConfigMap = dbTableEsSyncConfig.computeIfAbsent(StringUtils.trimToEmpty(config.getDestination()) - + "_" - + schema - + "-" - + tableItem.getTableName(), - k -> new ConcurrentHashMap<>()); - } + } else { + esSyncConfigMap = dbTableEsSyncConfig.computeIfAbsent( + StringUtils.trimToEmpty(config.getDestination()) + "_" + schemaKey + "-" + tableItem.getTableName(), + k -> new ConcurrentHashMap<>()); + } - esSyncConfigMap.put(configName, config); - }); + esSyncConfigMap.put(configName, config); + }); + } + + public boolean addConfig(String fileName, ESSyncConfig config) { + if (match(config)) { + esSyncConfig.put(fileName, config); + addSyncConfigToCache(fileName, config); + FileName2KeyMapping.register(getClass().getAnnotation(SPI.class).value(), fileName, configuration.getKey()); + return true; + } + return false; + } + + public void updateConfig(String fileName, ESSyncConfig config) { + if (config.getOuterAdapterKey() != null && !config.getOuterAdapterKey().equals(configuration.getKey())) { + // 理论上不允许改这个 因为本身就是通过这个关联起Adapter和Config的 + throw new RuntimeException("not allow to change outAdapterKey"); + } + esSyncConfig.put(fileName, config); + addSyncConfigToCache(fileName, config); + } + + public void deleteConfig(String fileName) { + esSyncConfig.remove(fileName); + for (Map configMap : dbTableEsSyncConfig.values()) { + if (configMap != null) { + configMap.remove(fileName); + } + } + FileName2KeyMapping.unregister(getClass().getAnnotation(SPI.class).value(), fileName); + } + + private boolean match(ESSyncConfig config) { + boolean sameMatch = 
config.getOuterAdapterKey() != null + && config.getOuterAdapterKey().equalsIgnoreCase(configuration.getKey()); + boolean prefixMatch = config.getOuterAdapterKey() == null && configuration.getKey() + .startsWith(StringUtils + .join(new String[] { Util.AUTO_GENERATED_PREFIX, config.getDestination(), config.getGroupId() }, '-')); + return sameMatch || prefixMatch; } } diff --git a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/config/ESSyncConfig.java b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/config/ESSyncConfig.java index 1277ebbc17..e4c31ba314 100644 --- a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/config/ESSyncConfig.java +++ b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/config/ESSyncConfig.java @@ -6,6 +6,7 @@ import java.util.Map; import com.alibaba.otter.canal.client.adapter.support.AdapterConfig; +import org.springframework.beans.factory.annotation.Value; /** * ES 映射配置 @@ -28,13 +29,13 @@ public class ESSyncConfig implements AdapterConfig { private String esVersion = "es6"; public void validate() { - if (esMapping._index == null) { + if (esMapping.index == null) { throw new NullPointerException("esMapping._index"); } - if ("es6".equals(esVersion) && esMapping._type == null) { + if ("es6".equals(esVersion) && esMapping.type == null) { throw new NullPointerException("esMapping._type"); } - if (esMapping._id == null && esMapping.getPk() == null) { + if (esMapping.id == null && esMapping.getPk() == null) { throw new NullPointerException("esMapping._id or esMapping.pk"); } if (esMapping.sql == null) { @@ -96,9 +97,14 @@ public void setEsVersion(String esVersion) { public static class ESMapping implements AdapterMapping { - private String _index; - private String _type; - private String _id; + @Value("${_index}") + private String index; + + @Value("${_type}") + private String type; + + @Value("${_id}") + private 
String id; private boolean upsert = false; private String pk; private Map relations = new LinkedHashMap<>(); @@ -114,28 +120,28 @@ public static class ESMapping implements AdapterMapping { private SchemaItem schemaItem; // sql解析结果模型 - public String get_index() { - return _index; + public String getIndex() { + return index; } - public void set_index(String _index) { - this._index = _index; + public void setIndex(String index) { + this.index = index; } - public String get_type() { - return _type; + public String getType() { + return type; } - public void set_type(String _type) { - this._type = _type; + public void setType(String type) { + this.type = type; } - public String get_id() { - return _id; + public String getId() { + return id; } - public void set_id(String _id) { - this._id = _id; + public void setId(String id) { + this.id = id; } public boolean isUpsert() { diff --git a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/config/ESSyncConfigLoader.java b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/config/ESSyncConfigLoader.java index 6b717a5df1..3056e33445 100644 --- a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/config/ESSyncConfigLoader.java +++ b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/config/ESSyncConfigLoader.java @@ -1,5 +1,6 @@ package com.alibaba.otter.canal.client.adapter.es.core.config; +import com.alibaba.otter.canal.client.adapter.support.YamlUtils; import java.util.LinkedHashMap; import java.util.Map; import java.util.Properties; @@ -7,7 +8,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder; import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; /** @@ -28,7 +28,7 @@ public static synchronized Map load(Properties envProperti String esv = envProperties.getProperty("es.version"); Map 
configContentMap = MappingConfigsLoader.loadConfigs(esv); configContentMap.forEach((fileName, content) -> { - ESSyncConfig config = YmlConfigBinder.bindYmlToObj(null, content, ESSyncConfig.class, null, envProperties); + ESSyncConfig config = YamlUtils.ymlToObj(null, content, ESSyncConfig.class, null, envProperties); if (config == null) { return; } diff --git a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/config/SchemaItem.java b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/config/SchemaItem.java index 441e440fd8..550f474b57 100644 --- a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/config/SchemaItem.java +++ b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/config/SchemaItem.java @@ -136,8 +136,8 @@ public TableItem getMainTable() { } public FieldItem getIdFieldItem(ESSyncConfig.ESMapping mapping) { - if (mapping.get_id() != null) { - return getSelectFields().get(mapping.get_id()); + if (mapping.getId() != null) { + return getSelectFields().get(mapping.getId()); } else { return getSelectFields().get(mapping.getPk()); } diff --git a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/monitor/ESConfigMonitor.java b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/monitor/ESConfigMonitor.java index 2aeea416da..fbda587ba1 100644 --- a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/monitor/ESConfigMonitor.java +++ b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/monitor/ESConfigMonitor.java @@ -1,7 +1,6 @@ package com.alibaba.otter.canal.client.adapter.es.core.monitor; import java.io.File; -import java.util.Map; import java.util.Properties; import org.apache.commons.io.filefilter.FileFilterUtils; @@ -11,11 +10,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import 
com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder; import com.alibaba.otter.canal.client.adapter.es.core.ESAdapter; import com.alibaba.otter.canal.client.adapter.es.core.config.ESSyncConfig; import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; import com.alibaba.otter.canal.client.adapter.support.Util; +import com.alibaba.otter.canal.client.adapter.support.YamlUtils; + +; public class ESConfigMonitor { @@ -36,7 +37,7 @@ public void init(ESAdapter esAdapter, Properties envProperties) { File confDir = Util.getConfDirPath(adapterName); try { FileAlterationObserver observer = new FileAlterationObserver(confDir, - FileFilterUtils.and(FileFilterUtils.fileFileFilter(), FileFilterUtils.suffixFileFilter("yml"))); + FileFilterUtils.and(FileFilterUtils.fileFileFilter(), FileFilterUtils.suffixFileFilter("yml"))); FileListener listener = new FileListener(); observer.addListener(listener); fileMonitor = new FileAlterationMonitor(3000, observer); @@ -63,15 +64,20 @@ public void onFileCreate(File file) { try { // 加载新增的配置文件 String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName()); - ESSyncConfig config = YmlConfigBinder.bindYmlToObj(null, + ESSyncConfig config = YamlUtils.ymlToObj(null, configContent, ESSyncConfig.class, null, envProperties); if (config != null) { + // 这里要记得设置esVersion bugfix + config.setEsVersion(adapterName); config.validate(); - addConfigToCache(file, config); - logger.info("Add a new es mapping config: {} to canal adapter", file.getName()); + boolean result = esAdapter.addConfig(file.getName(), config); + if (result) { + logger.info("Add a new es mapping config: {} to canal adapter", + file.getName()); + } } } catch (Exception e) { logger.error(e.getMessage(), e); @@ -91,7 +97,7 @@ public void onFileChange(File file) { onFileDelete(file); return; } - ESSyncConfig config = YmlConfigBinder.bindYmlToObj(null, + ESSyncConfig config = YamlUtils.ymlToObj(null, configContent, 
ESSyncConfig.class, null, @@ -99,12 +105,10 @@ public void onFileChange(File file) { if (config == null) { return; } + // 这里要记得设置esVersion bugfix + config.setEsVersion(adapterName); config.validate(); - if (esAdapter.getEsSyncConfig().containsKey(file.getName())) { - deleteConfigFromCache(file); - } - addConfigToCache(file, config); - + esAdapter.updateConfig(file.getName(), config); logger.info("Change a es mapping config: {} of canal adapter", file.getName()); } } catch (Exception e) { @@ -118,29 +122,12 @@ public void onFileDelete(File file) { try { if (esAdapter.getEsSyncConfig().containsKey(file.getName())) { - deleteConfigFromCache(file); - + esAdapter.deleteConfig(file.getName()); logger.info("Delete a es mapping config: {} of canal adapter", file.getName()); } } catch (Exception e) { logger.error(e.getMessage(), e); } } - - private void addConfigToCache(File file, ESSyncConfig config) { - esAdapter.getEsSyncConfig().put(file.getName(), config); - - esAdapter.addSyncConfigToCache(file.getName(), config); - } - - private void deleteConfigFromCache(File file) { - esAdapter.getEsSyncConfig().remove(file.getName()); - for (Map configMap : esAdapter.getDbTableEsSyncConfig().values()) { - if (configMap != null) { - configMap.remove(file.getName()); - } - } - - } } } diff --git a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/service/ESSyncService.java b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/service/ESSyncService.java index 31e22eb6b2..4bcb951952 100644 --- a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/service/ESSyncService.java +++ b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/service/ESSyncService.java @@ -1,19 +1,15 @@ package com.alibaba.otter.canal.client.adapter.es.core.service; -import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedHashMap; -import java.util.List; -import 
java.util.Map; +import java.util.*; import javax.sql.DataSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter; import com.alibaba.otter.canal.client.adapter.es.core.config.ESSyncConfig; import com.alibaba.otter.canal.client.adapter.es.core.config.ESSyncConfig.ESMapping; import com.alibaba.otter.canal.client.adapter.es.core.config.SchemaItem; @@ -58,13 +54,13 @@ public void sync(Collection esSyncConfigs, Dml dml) { for (ESSyncConfig config : esSyncConfigs) { if (logger.isTraceEnabled()) { logger.trace("Prepared to sync index: {}, destination: {}", - config.getEsMapping().get_index(), + config.getEsMapping().getIndex(), dml.getDestination()); } this.sync(config, dml); if (logger.isTraceEnabled()) { logger.trace("Sync completed: {}, destination: {}", - config.getEsMapping().get_index(), + config.getEsMapping().getIndex(), dml.getDestination()); } } @@ -77,9 +73,9 @@ public void sync(Collection esSyncConfigs, Dml dml) { if (logger.isDebugEnabled()) { StringBuilder configIndexes = new StringBuilder(); esSyncConfigs - .forEach(esSyncConfig -> configIndexes.append(esSyncConfig.getEsMapping().get_index()).append(" ")); + .forEach(esSyncConfig -> configIndexes.append(esSyncConfig.getEsMapping().getIndex()).append(" ")); logger.debug("DML: {} \nAffected indexes: {}", - JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue), + JSON.toJSONString(dml, JSONWriter.Feature.WriteNulls), configIndexes.toString()); } } @@ -109,10 +105,10 @@ public void sync(ESSyncConfig config, Dml dml) { logger.trace("Sync elapsed time: {} ms,destination: {}, es index: {}", (System.currentTimeMillis() - begin), dml.getDestination(), - config.getEsMapping().get_index()); + config.getEsMapping().getIndex()); } } catch (Throwable e) { - 
logger.error("sync error, es index: {}, DML : {}", config.getEsMapping().get_index(), dml); + logger.error("sync error, es index: {}, DML : {}", config.getEsMapping().getIndex(), dml); throw new RuntimeException(e); } } @@ -209,12 +205,12 @@ private void update(ESSyncConfig config, Dml dml) { if (schemaItem.getAliasTableItems().size() == 1 && schemaItem.isAllFieldsSimple()) { // ------单表 & 所有字段都为简单字段------ - singleTableSimpleFiledUpdate(config, dml, data, old); + singleTableSimpleFiledUpdate(config, schemaItem.getMainTable().getAlias(), dml, data, old); } else { // ------主表 查询sql来更新------ if (schemaItem.getMainTable().getTableName().equalsIgnoreCase(dml.getTable())) { ESMapping mapping = config.getEsMapping(); - String idFieldName = mapping.get_id() == null ? mapping.getPk() : mapping.get_id(); + String idFieldName = mapping.getId() == null ? mapping.getPk() : mapping.getId(); FieldItem idFieldItem = schemaItem.getSelectFields().get(idFieldName); boolean idFieldSimple = true; @@ -263,7 +259,7 @@ private void update(ESSyncConfig config, Dml dml) { // 判断主键和所更新的字段是否全为简单字段 if (idFieldSimple && allUpdateFieldSimple && !fkChanged) { - singleTableSimpleFiledUpdate(config, dml, data, old); + singleTableSimpleFiledUpdate(config, schemaItem.getMainTable().getAlias(), dml, data, old); } else { mainTableUpdate(config, dml, data, old); } @@ -340,7 +336,7 @@ private void delete(ESSyncConfig config, Dml dml) { // ------是主表------ if (schemaItem.getMainTable().getTableName().equalsIgnoreCase(dml.getTable())) { - if (mapping.get_id() != null) { + if (mapping.getId() != null) { FieldItem idFieldItem = schemaItem.getIdFieldItem(mapping); // 主键为简单字段 if (!idFieldItem.isMethod() && !idFieldItem.isBinaryOp()) { @@ -353,7 +349,7 @@ private void delete(ESSyncConfig config, Dml dml) { logger.trace("Main table delete es index, destination:{}, table: {}, index: {}, id: {}", config.getDestination(), dml.getTable(), - mapping.get_index(), + mapping.getIndex(), idVal); } 
esTemplate.delete(mapping, idVal, null); @@ -372,7 +368,7 @@ private void delete(ESSyncConfig config, Dml dml) { logger.trace("Main table delete es index, destination:{}, table: {}, index: {}, pk: {}", config.getDestination(), dml.getTable(), - mapping.get_index(), + mapping.getIndex(), pkVal); } esFieldData.remove(pkFieldItem.getFieldName()); @@ -442,7 +438,7 @@ private void singleTableSimpleFiledInsert(ESSyncConfig config, Dml dml, Map d logger.trace("Main table insert to es index by query sql, destination:{}, table: {}, index: {}, sql: {}", config.getDestination(), dml.getTable(), - mapping.get_index(), + mapping.getIndex(), sql.replace("\n", " ")); } Util.sqlRS(ds, sql, rs -> { @@ -479,7 +475,7 @@ private void mainTableInsert(ESSyncConfig config, Dml dml, Map d "Main table insert to es index by query sql, destination:{}, table: {}, index: {}, id: {}", config.getDestination(), dml.getTable(), - mapping.get_index(), + mapping.getIndex(), idVal); } esTemplate.insert(mapping, idVal, esFieldData); @@ -501,7 +497,7 @@ private void mainTableDelete(ESSyncConfig config, Dml dml, Map d logger.trace("Main table delete es index by query sql, destination:{}, table: {}, index: {}, sql: {}", config.getDestination(), dml.getTable(), - mapping.get_index(), + mapping.getIndex(), sql.replace("\n", " ")); } Util.sqlRS(ds, sql, rs -> { @@ -523,7 +519,7 @@ private void mainTableDelete(ESSyncConfig config, Dml dml, Map d "Main table delete to es index by query sql, destination:{}, table: {}, index: {}, id: {}", config.getDestination(), dml.getTable(), - mapping.get_index(), + mapping.getIndex(), idVal); } esTemplate.delete(mapping, idVal, esFieldData); @@ -558,7 +554,7 @@ private void joinTableSimpleFieldOperation(ESSyncConfig config, Dml dml, Map { @@ -665,7 +661,7 @@ private void subTableSimpleFieldOperation(ESSyncConfig config, Dml dml, Map logger.trace("Join table update es index by query whole sql, destination:{}, table: {}, index: {}, sql: {}", config.getDestination(), 
dml.getTable(), - mapping.get_index(), + mapping.getIndex(), sql.toString().replace("\n", " ")); } Util.sqlRS(ds, sql.toString(), rs -> { @@ -779,7 +775,7 @@ private void wholeSqlOperation(ESSyncConfig config, Dml dml, Map .getValFromRS(mapping, rs, fieldItem.getFieldName(), fieldItem.getFieldName()); String fieldName = fieldItem.getFieldName(); // 判断是否是主键 - if (fieldName.equals(mapping.get_id())) { + if (fieldName.equals(mapping.getId())) { fieldName = "_id"; } paramsTmp.put(fieldName, value); @@ -791,7 +787,7 @@ private void wholeSqlOperation(ESSyncConfig config, Dml dml, Map "Join table update es index by query whole sql, destination:{}, table: {}, index: {}", config.getDestination(), dml.getTable(), - mapping.get_index()); + mapping.getIndex()); } esTemplate.updateByQuery(config, paramsTmp, esFieldData); } @@ -810,18 +806,18 @@ private void wholeSqlOperation(ESSyncConfig config, Dml dml, Map * @param data 单行data数据 * @param old 单行old数据 */ - private void singleTableSimpleFiledUpdate(ESSyncConfig config, Dml dml, Map data, + private void singleTableSimpleFiledUpdate(ESSyncConfig config, String owner, Dml dml, Map data, Map old) { ESMapping mapping = config.getEsMapping(); Map esFieldData = new LinkedHashMap<>(); - Object idVal = esTemplate.getESDataFromDmlData(mapping, data, old, esFieldData); + Object idVal = esTemplate.getESDataFromDmlData(mapping, owner, data, old, esFieldData); if (logger.isTraceEnabled()) { logger.trace("Main table update to es index, destination:{}, table: {}, index: {}, id: {}", config.getDestination(), dml.getTable(), - mapping.get_index(), + mapping.getIndex(), idVal); } esTemplate.update(mapping, idVal, esFieldData); @@ -844,7 +840,7 @@ private void mainTableUpdate(ESSyncConfig config, Dml dml, Map d logger.trace("Main table update to es index by query sql, destination:{}, table: {}, index: {}, sql: {}", config.getDestination(), dml.getTable(), - mapping.get_index(), + mapping.getIndex(), sql.replace("\n", " ")); } Util.sqlRS(ds, sql, rs 
-> { @@ -858,7 +854,7 @@ private void mainTableUpdate(ESSyncConfig config, Dml dml, Map d "Main table update to es index by query sql, destination:{}, table: {}, index: {}, id: {}", config.getDestination(), dml.getTable(), - mapping.get_index(), + mapping.getIndex(), idVal); } esTemplate.update(mapping, idVal, esFieldData); diff --git a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/support/ESSyncUtil.java b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/support/ESSyncUtil.java index 0e93091005..914300c303 100644 --- a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/support/ESSyncUtil.java +++ b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/support/ESSyncUtil.java @@ -18,7 +18,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson2.JSON; import com.alibaba.otter.canal.client.adapter.es.core.config.ESSyncConfig.ESMapping; import com.alibaba.otter.canal.client.adapter.es.core.config.SchemaItem; import com.alibaba.otter.canal.client.adapter.es.core.config.SchemaItem.ColumnItem; diff --git a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/support/ESTemplate.java b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/support/ESTemplate.java index 2286b9155e..45fb67dee9 100644 --- a/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/support/ESTemplate.java +++ b/client-adapter/escore/src/main/java/com/alibaba/otter/canal/client/adapter/es/core/support/ESTemplate.java @@ -57,13 +57,11 @@ Object getValFromRS(ESMapping mapping, ResultSet resultSet, String fieldName, Object getIdValFromRS(ESMapping mapping, ResultSet resultSet) throws SQLException; - Object getESDataFromRS(ESMapping mapping, ResultSet resultSet, Map dmlOld, - Map esFieldData) throws SQLException; + 
Object getESDataFromRS(ESMapping mapping, ResultSet resultSet, Map dmlOld, Map esFieldData) throws SQLException; Object getValFromData(ESMapping mapping, Map dmlData, String fieldName, String columnName); Object getESDataFromDmlData(ESMapping mapping, Map dmlData, Map esFieldData); - Object getESDataFromDmlData(ESMapping mapping, Map dmlData, Map dmlOld, - Map esFieldData); + Object getESDataFromDmlData(ESMapping mapping,String owner, Map dmlData, Map dmlOld, Map esFieldData); } diff --git a/client-adapter/hbase/pom.xml b/client-adapter/hbase/pom.xml index b930b61bc2..220044c897 100644 --- a/client-adapter/hbase/pom.xml +++ b/client-adapter/hbase/pom.xml @@ -3,7 +3,7 @@ canal.client-adapter com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml 4.0.0 @@ -21,7 +21,6 @@ org.apache.hbase hbase-shaded-client - 1.1.2 org.slf4j diff --git a/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/HbaseAdapter.java b/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/HbaseAdapter.java index bd78331a9e..26cd710f0d 100644 --- a/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/HbaseAdapter.java +++ b/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/HbaseAdapter.java @@ -1,9 +1,25 @@ package com.alibaba.otter.canal.client.adapter.hbase; +import com.alibaba.otter.canal.client.adapter.OuterAdapter; +import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig; +import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfigLoader; +import com.alibaba.otter.canal.client.adapter.hbase.monitor.HbaseConfigMonitor; +import com.alibaba.otter.canal.client.adapter.hbase.service.HbaseEtlService; +import com.alibaba.otter.canal.client.adapter.hbase.service.HbaseSyncService; +import com.alibaba.otter.canal.client.adapter.hbase.support.HbaseTemplate; +import com.alibaba.otter.canal.client.adapter.support.Dml; +import 
com.alibaba.otter.canal.client.adapter.support.EtlResult; +import com.alibaba.otter.canal.client.adapter.support.FileName2KeyMapping; +import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig; +import com.alibaba.otter.canal.client.adapter.support.SPI; +import com.alibaba.otter.canal.client.adapter.support.Util; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; import java.util.concurrent.ConcurrentHashMap; - import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -16,18 +32,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.otter.canal.client.adapter.OuterAdapter; -import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig; -import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfigLoader; -import com.alibaba.otter.canal.client.adapter.hbase.monitor.HbaseConfigMonitor; -import com.alibaba.otter.canal.client.adapter.hbase.service.HbaseEtlService; -import com.alibaba.otter.canal.client.adapter.hbase.service.HbaseSyncService; -import com.alibaba.otter.canal.client.adapter.hbase.support.HbaseTemplate; -import com.alibaba.otter.canal.client.adapter.support.Dml; -import com.alibaba.otter.canal.client.adapter.support.EtlResult; -import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig; -import com.alibaba.otter.canal.client.adapter.support.SPI; - /** * HBase外部适配器 * @@ -49,6 +53,8 @@ public class HbaseAdapter implements OuterAdapter { private Properties envProperties; + private OuterAdapterConfig configuration; + public Map getHbaseMapping() { return hbaseMapping; } @@ -61,33 +67,12 @@ public Map> getMappingConfigCache() { public void init(OuterAdapterConfig configuration, Properties envProperties) { try { this.envProperties = envProperties; + this.configuration 
= configuration; Map hbaseMappingTmp = MappingConfigLoader.load(envProperties); // 过滤不匹配的key的配置 - hbaseMappingTmp.forEach((key, mappingConfig) -> { - if ((mappingConfig.getOuterAdapterKey() == null && configuration.getKey() == null) - || (mappingConfig.getOuterAdapterKey() != null - && mappingConfig.getOuterAdapterKey().equalsIgnoreCase(configuration.getKey()))) { - hbaseMapping.put(key, mappingConfig); - } + hbaseMappingTmp.forEach((key, config) -> { + addConfig(key, config); }); - for (Map.Entry entry : hbaseMapping.entrySet()) { - String configName = entry.getKey(); - MappingConfig mappingConfig = entry.getValue(); - String k; - if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { - k = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-" - + StringUtils.trimToEmpty(mappingConfig.getGroupId()) + "_" - + mappingConfig.getHbaseMapping().getDatabase() + "-" - + mappingConfig.getHbaseMapping().getTable(); - } else { - k = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_" - + mappingConfig.getHbaseMapping().getDatabase() + "-" - + mappingConfig.getHbaseMapping().getTable(); - } - Map configMap = mappingConfigCache.computeIfAbsent(k, - k1 -> new ConcurrentHashMap<>()); - configMap.put(configName, mappingConfig); - } Map properties = configuration.getProperties(); @@ -223,4 +208,62 @@ public String getDestination(String task) { } return null; } + + private void addSyncConfigToCache(String configName, MappingConfig mappingConfig) { + String k; + if (envProperties != null && !"tcp" + .equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { + k = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-" + StringUtils + .trimToEmpty(mappingConfig.getGroupId()) + "_" + mappingConfig.getHbaseMapping() + .getDatabase() + "-" + mappingConfig.getHbaseMapping().getTable(); + } else { + k = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_" + mappingConfig + 
.getHbaseMapping().getDatabase() + "-" + mappingConfig.getHbaseMapping() + .getTable(); + } + Map configMap = mappingConfigCache + .computeIfAbsent(k, k1 -> new ConcurrentHashMap<>()); + configMap.put(configName, mappingConfig); + } + + public boolean addConfig(String fileName, MappingConfig config) { + if (match(config)) { + hbaseMapping.put(fileName, config); + addSyncConfigToCache(fileName, config); + FileName2KeyMapping.register(getClass().getAnnotation(SPI.class).value(), fileName, + configuration.getKey()); + return true; + } + return false; + } + + public void updateConfig(String fileName, MappingConfig config) { + if (config.getOuterAdapterKey() != null && !config.getOuterAdapterKey() + .equals(configuration.getKey())) { + // 理论上不允许改这个 因为本身就是通过这个关联起Adapter和Config的 + throw new RuntimeException("not allow to change outAdapterKey"); + } + hbaseMapping.put(fileName, config); + addSyncConfigToCache(fileName, config); + } + + public void deleteConfig(String fileName) { + hbaseMapping.remove(fileName); + for (Map configMap : mappingConfigCache.values()) { + if (configMap != null) { + configMap.remove(fileName); + } + } + FileName2KeyMapping.unregister(getClass().getAnnotation(SPI.class).value(), fileName); + } + + private boolean match(MappingConfig config) { + boolean sameMatch = config.getOuterAdapterKey() != null && config.getOuterAdapterKey() + .equalsIgnoreCase(configuration.getKey()); + boolean prefixMatch = config.getOuterAdapterKey() == null && configuration.getKey() + .startsWith(StringUtils + .join(new String[]{Util.AUTO_GENERATED_PREFIX, config.getDestination(), + config.getGroupId()}, '-')); + return sameMatch || prefixMatch; + } } diff --git a/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/config/MappingConfigLoader.java b/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/config/MappingConfigLoader.java index 2b43492f6d..02a1d37665 100644 --- 
a/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/config/MappingConfigLoader.java +++ b/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/config/MappingConfigLoader.java @@ -1,5 +1,6 @@ package com.alibaba.otter.canal.client.adapter.hbase.config; +import com.alibaba.otter.canal.client.adapter.support.YamlUtils; import java.util.LinkedHashMap; import java.util.Map; import java.util.Properties; @@ -7,7 +8,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder; import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; /** @@ -32,8 +32,7 @@ public static Map load(Properties envProperties) { Map configContentMap = MappingConfigsLoader.loadConfigs("hbase"); configContentMap.forEach((fileName, content) -> { - MappingConfig config = YmlConfigBinder - .bindYmlToObj(null, content, MappingConfig.class, null, envProperties); + MappingConfig config = YamlUtils.ymlToObj(null, content, MappingConfig.class, null, envProperties); if (config == null) { return; } diff --git a/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/monitor/HbaseConfigMonitor.java b/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/monitor/HbaseConfigMonitor.java index 09b7052d34..0ac3f960b7 100644 --- a/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/monitor/HbaseConfigMonitor.java +++ b/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/monitor/HbaseConfigMonitor.java @@ -1,24 +1,19 @@ package com.alibaba.otter.canal.client.adapter.hbase.monitor; -import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder; import com.alibaba.otter.canal.client.adapter.hbase.HbaseAdapter; import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig; import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; 
import com.alibaba.otter.canal.client.adapter.support.Util; - +import com.alibaba.otter.canal.client.adapter.support.YamlUtils; +import java.io.File; +import java.util.Properties; import org.apache.commons.io.filefilter.FileFilterUtils; import org.apache.commons.io.monitor.FileAlterationListenerAdaptor; import org.apache.commons.io.monitor.FileAlterationMonitor; import org.apache.commons.io.monitor.FileAlterationObserver; -import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - public class HbaseConfigMonitor { private static final Logger logger = LoggerFactory.getLogger(HbaseConfigMonitor.class); @@ -37,7 +32,7 @@ public void init(HbaseAdapter hbaseAdapter, Properties envProperties) { File confDir = Util.getConfDirPath(adapterName); try { FileAlterationObserver observer = new FileAlterationObserver(confDir, - FileFilterUtils.and(FileFilterUtils.fileFileFilter(), FileFilterUtils.suffixFileFilter("yml"))); + FileFilterUtils.and(FileFilterUtils.fileFileFilter(), FileFilterUtils.suffixFileFilter("yml"))); FileListener listener = new FileListener(); observer.addListener(listener); fileMonitor = new FileAlterationMonitor(3000, observer); @@ -64,15 +59,16 @@ public void onFileCreate(File file) { try { // 加载新增的配置文件 String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName()); - MappingConfig config = YmlConfigBinder - .bindYmlToObj(null, configContent, MappingConfig.class, null, envProperties); + MappingConfig config = YamlUtils.ymlToObj(null, configContent, MappingConfig.class, null, envProperties); if (config == null) { return; } config.validate(); - addConfigToCache(file, config); - - logger.info("Add a new hbase mapping config: {} to canal adapter", file.getName()); + boolean result = hbaseAdapter.addConfig(file.getName(), config); + if (result) { + logger.info("Add a new hbase 
mapping config: {} to canal adapter", + file.getName()); + } } catch (Exception e) { logger.error(e.getMessage(), e); } @@ -91,16 +87,12 @@ public void onFileChange(File file) { onFileDelete(file); return; } - MappingConfig config = YmlConfigBinder - .bindYmlToObj(null, configContent, MappingConfig.class, null, envProperties); + MappingConfig config = YamlUtils.ymlToObj(null, configContent, MappingConfig.class, null, envProperties); if (config == null) { return; } config.validate(); - if (hbaseAdapter.getHbaseMapping().containsKey(file.getName())) { - deleteConfigFromCache(file); - } - addConfigToCache(file, config); + hbaseAdapter.updateConfig(file.getName(), config); } } catch (Exception e) { logger.error(e.getMessage(), e); @@ -113,33 +105,12 @@ public void onFileDelete(File file) { try { if (hbaseAdapter.getHbaseMapping().containsKey(file.getName())) { - deleteConfigFromCache(file); - + hbaseAdapter.deleteConfig(file.getName()); logger.info("Delete a hbase mapping config: {} of canal adapter", file.getName()); } } catch (Exception e) { logger.error(e.getMessage(), e); } } - - private void addConfigToCache(File file, MappingConfig config) { - hbaseAdapter.getHbaseMapping().put(file.getName(), config); - Map configMap = hbaseAdapter.getMappingConfigCache() - .computeIfAbsent(StringUtils.trimToEmpty(config.getDestination()) + "_" - + config.getHbaseMapping().getDatabase() + "-" + config.getHbaseMapping().getTable(), - k1 -> new HashMap<>()); - configMap.put(file.getName(), config); - } - - private void deleteConfigFromCache(File file) { - - hbaseAdapter.getHbaseMapping().remove(file.getName()); - for (Map configMap : hbaseAdapter.getMappingConfigCache().values()) { - if (configMap != null) { - configMap.remove(file.getName()); - } - } - - } } } diff --git a/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseSyncService.java 
b/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseSyncService.java index e0dc127955..895d3ae8fc 100644 --- a/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseSyncService.java +++ b/client-adapter/hbase/src/main/java/com/alibaba/otter/canal/client/adapter/hbase/service/HbaseSyncService.java @@ -6,8 +6,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter.Feature; import com.alibaba.otter.canal.client.adapter.hbase.config.MappingConfig; import com.alibaba.otter.canal.client.adapter.hbase.support.*; import com.alibaba.otter.canal.client.adapter.support.Dml; @@ -39,7 +39,7 @@ public void sync(MappingConfig config, Dml dml) { delete(config, dml); } if (logger.isDebugEnabled()) { - logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + logger.debug("DML: {}", JSON.toJSONString(dml, Feature.WriteNulls)); } } } diff --git a/client-adapter/kudu/pom.xml b/client-adapter/kudu/pom.xml index 12e4ec09cd..d5108b2195 100644 --- a/client-adapter/kudu/pom.xml +++ b/client-adapter/kudu/pom.xml @@ -3,7 +3,7 @@ canal.client-adapter com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml 4.0.0 @@ -21,18 +21,15 @@ org.apache.kudu kudu-client - 1.6.0 junit junit - 4.12 test mysql mysql-connector-java - 5.1.48 test diff --git a/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/KuduAdapter.java b/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/KuduAdapter.java index 74b825476a..409744fbd1 100644 --- a/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/KuduAdapter.java +++ b/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/KuduAdapter.java @@ -1,16 +1,5 @@ package 
com.alibaba.otter.canal.client.adapter.kudu; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.ConcurrentHashMap; - -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.alibaba.otter.canal.client.adapter.OuterAdapter; import com.alibaba.otter.canal.client.adapter.kudu.config.KuduMappingConfig; import com.alibaba.otter.canal.client.adapter.kudu.config.KuduMappingConfigLoader; @@ -20,8 +9,19 @@ import com.alibaba.otter.canal.client.adapter.kudu.support.KuduTemplate; import com.alibaba.otter.canal.client.adapter.support.Dml; import com.alibaba.otter.canal.client.adapter.support.EtlResult; +import com.alibaba.otter.canal.client.adapter.support.FileName2KeyMapping; import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig; import com.alibaba.otter.canal.client.adapter.support.SPI; +import com.alibaba.otter.canal.client.adapter.support.Util; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.ConcurrentHashMap; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * @author liuyadong @@ -35,8 +35,6 @@ public class KuduAdapter implements OuterAdapter { private Map kuduMapping = new ConcurrentHashMap<>(); // 文件名对应配置 private Map> mappingConfigCache = new ConcurrentHashMap<>(); // 库名-表名对应配置 - private String dataSourceKey; - private KuduTemplate kuduTemplate; private KuduSyncService kuduSyncService; @@ -45,6 +43,8 @@ public class KuduAdapter implements OuterAdapter { private Properties envProperties; + private OuterAdapterConfig configuration; + public Map getKuduMapping() { return kuduMapping; } @@ -56,36 +56,17 @@ public Map> getMappingConfigCache() { @Override public void init(OuterAdapterConfig configuration, 
Properties envProperties) { this.envProperties = envProperties; + this.configuration = configuration; Map kuduMappingTmp = KuduMappingConfigLoader.load(envProperties); // 过滤不匹配的key的配置,获取连接key,key为配置文件名称 - kuduMappingTmp.forEach((key, mappingConfig) -> { - if ((mappingConfig.getOuterAdapterKey() == null && configuration.getKey() == null) - || (mappingConfig.getOuterAdapterKey() != null && mappingConfig.getOuterAdapterKey() - .equalsIgnoreCase(configuration.getKey()))) { - kuduMapping.put(key, mappingConfig); - dataSourceKey = mappingConfig.getDataSourceKey(); - } + kuduMappingTmp.forEach((key, config) -> { + addConfig(key, config); }); // 判断目标字段是否为空 if (kuduMapping.isEmpty()) { throw new RuntimeException("No kudu adapter found for config key: " + configuration.getKey()); } - for (Map.Entry entry : kuduMapping.entrySet()) { - String configName = entry.getKey(); - KuduMappingConfig mappingConfig = entry.getValue(); - String k; - if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { - k = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-" - + StringUtils.trimToEmpty(mappingConfig.getGroupId()) + "_" - + mappingConfig.getKuduMapping().getDatabase() + "-" + mappingConfig.getKuduMapping().getTable(); - } else { - k = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_" - + mappingConfig.getKuduMapping().getDatabase() + "-" + mappingConfig.getKuduMapping().getTable(); - } - Map configMap = mappingConfigCache.computeIfAbsent(k, - k1 -> new ConcurrentHashMap<>()); - configMap.put(configName, mappingConfig); - } + Map properties = configuration.getProperties(); String kudu_master = properties.get("kudu.master.address"); @@ -203,4 +184,60 @@ public String getDestination(String task) { } return null; } + + private void addSyncConfigToCache(String configName, KuduMappingConfig mappingConfig) { + String k; + if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { + k 
= StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-" + + StringUtils.trimToEmpty(mappingConfig.getGroupId()) + "_" + + mappingConfig.getKuduMapping().getDatabase() + "-" + mappingConfig.getKuduMapping().getTable(); + } else { + k = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_" + + mappingConfig.getKuduMapping().getDatabase() + "-" + mappingConfig.getKuduMapping().getTable(); + } + Map configMap = mappingConfigCache.computeIfAbsent(k, + k1 -> new ConcurrentHashMap<>()); + configMap.put(configName, mappingConfig); + } + + public boolean addConfig(String fileName, KuduMappingConfig config) { + if (match(config)) { + kuduMapping.put(fileName, config); + addSyncConfigToCache(fileName, config); + FileName2KeyMapping.register(getClass().getAnnotation(SPI.class).value(), fileName, + configuration.getKey()); + return true; + } + return false; + } + + public void updateConfig(String fileName, KuduMappingConfig config) { + if (config.getOuterAdapterKey() != null && !config.getOuterAdapterKey() + .equals(configuration.getKey())) { + // 理论上不允许改这个 因为本身就是通过这个关联起Adapter和Config的 + throw new RuntimeException("not allow to change outAdapterKey"); + } + kuduMapping.put(fileName, config); + addSyncConfigToCache(fileName, config); + } + + public void deleteConfig(String fileName) { + kuduMapping.remove(fileName); + for (Map configMap : mappingConfigCache.values()) { + if (configMap != null) { + configMap.remove(fileName); + } + } + FileName2KeyMapping.unregister(getClass().getAnnotation(SPI.class).value(), fileName); + } + + private boolean match(KuduMappingConfig config) { + boolean sameMatch = config.getOuterAdapterKey() != null && config.getOuterAdapterKey() + .equalsIgnoreCase(configuration.getKey()); + boolean prefixMatch = config.getOuterAdapterKey() == null && configuration.getKey() + .startsWith(StringUtils + .join(new String[]{Util.AUTO_GENERATED_PREFIX, config.getDestination(), + config.getGroupId()}, '-')); + return sameMatch || prefixMatch; + } } 
diff --git a/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/config/KuduMappingConfigLoader.java b/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/config/KuduMappingConfigLoader.java index f2ca45ad17..f1b389ace7 100644 --- a/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/config/KuduMappingConfigLoader.java +++ b/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/config/KuduMappingConfigLoader.java @@ -1,5 +1,6 @@ package com.alibaba.otter.canal.client.adapter.kudu.config; +import com.alibaba.otter.canal.client.adapter.support.YamlUtils; import java.util.LinkedHashMap; import java.util.Map; import java.util.Properties; @@ -7,7 +8,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder; import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; /** @@ -30,11 +30,7 @@ public static Map load(Properties envProperties) { Map configContentMap = MappingConfigsLoader.loadConfigs("kudu"); configContentMap.forEach((fileName, content) -> { - KuduMappingConfig config = YmlConfigBinder.bindYmlToObj(null, - content, - KuduMappingConfig.class, - null, - envProperties); + KuduMappingConfig config = YamlUtils.ymlToObj(null, content, KuduMappingConfig.class, null, envProperties); if (config == null) { return; } diff --git a/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/monitor/KuduConfigMonitor.java b/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/monitor/KuduConfigMonitor.java index 992f2f85df..7c4e623161 100644 --- a/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/monitor/KuduConfigMonitor.java +++ b/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/monitor/KuduConfigMonitor.java @@ -1,24 +1,19 @@ package 
com.alibaba.otter.canal.client.adapter.kudu.monitor; +import com.alibaba.otter.canal.client.adapter.kudu.KuduAdapter; +import com.alibaba.otter.canal.client.adapter.kudu.config.KuduMappingConfig; +import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; +import com.alibaba.otter.canal.client.adapter.support.Util; +import com.alibaba.otter.canal.client.adapter.support.YamlUtils; import java.io.File; -import java.util.HashMap; -import java.util.Map; import java.util.Properties; - import org.apache.commons.io.filefilter.FileFilterUtils; import org.apache.commons.io.monitor.FileAlterationListenerAdaptor; import org.apache.commons.io.monitor.FileAlterationMonitor; import org.apache.commons.io.monitor.FileAlterationObserver; -import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder; -import com.alibaba.otter.canal.client.adapter.kudu.KuduAdapter; -import com.alibaba.otter.canal.client.adapter.kudu.config.KuduMappingConfig; -import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; -import com.alibaba.otter.canal.client.adapter.support.Util; - /** * @author liuyadong * @description 配置文件监听 @@ -74,18 +69,16 @@ public void onFileCreate(File file) { try { // 加载新增的配置文件 String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName()); - KuduMappingConfig config = YmlConfigBinder.bindYmlToObj(null, - configContent, - KuduMappingConfig.class, - null, - envProperties); + KuduMappingConfig config = YamlUtils + .ymlToObj(null, configContent, KuduMappingConfig.class, null, envProperties); if (config == null) { return; } config.validate(); - addConfigToCache(file, config); - - logger.info("Add a new kudu mapping config: {} to canal adapter", file.getName()); + boolean result = kuduAdapter.addConfig(file.getName(), config); + if (result) { + logger.info("Add a new kudu mapping config: {} to canal 
adapter", file.getName()); + } } catch (Exception e) { logger.error(e.getMessage(), e); } @@ -98,25 +91,19 @@ public void onFileChange(File file) { try { if (kuduAdapter.getKuduMapping().containsKey(file.getName())) { // 加载配置文件 - String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator - + file.getName()); + String configContent = MappingConfigsLoader + .loadConfig(adapterName + File.separator + file.getName()); if (configContent == null) { onFileDelete(file); return; } - KuduMappingConfig config = YmlConfigBinder.bindYmlToObj(null, - configContent, - KuduMappingConfig.class, - null, - envProperties); + KuduMappingConfig config = YamlUtils + .ymlToObj(null, configContent, KuduMappingConfig.class, null, envProperties); if (config == null) { return; } config.validate(); - if (kuduAdapter.getKuduMapping().containsKey(file.getName())) { - deleteConfigFromCache(file); - } - addConfigToCache(file, config); + kuduAdapter.updateConfig(file.getName(), config); } } catch (Exception e) { logger.error(e.getMessage(), e); @@ -129,42 +116,12 @@ public void onFileDelete(File file) { try { if (kuduAdapter.getKuduMapping().containsKey(file.getName())) { - deleteConfigFromCache(file); + kuduAdapter.deleteConfig(file.getName()); logger.info("Delete a hbase mapping config: {} of canal adapter", file.getName()); } } catch (Exception e) { logger.error(e.getMessage(), e); } } - - /** - * 添加配置文件信息到缓存 - * - * @param file - * @param config - */ - private void addConfigToCache(File file, KuduMappingConfig config) { - kuduAdapter.getKuduMapping().put(file.getName(), config); - Map configMap = kuduAdapter.getMappingConfigCache() - .computeIfAbsent(StringUtils.trimToEmpty(config.getDestination()) + "." - + config.getKuduMapping().getDatabase() + "." 
+ config.getKuduMapping().getTable(), - k1 -> new HashMap<>()); - configMap.put(file.getName(), config); - } - - /** - * 从缓存中删除配置 - * - * @param file 文件 - */ - private void deleteConfigFromCache(File file) { - kuduAdapter.getKuduMapping().remove(file.getName()); - for (Map configMap : kuduAdapter.getMappingConfigCache().values()) { - if (configMap != null) { - configMap.remove(file.getName()); - } - } - } - } } diff --git a/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/service/KuduSyncService.java b/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/service/KuduSyncService.java index d388b41059..116a766165 100644 --- a/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/service/KuduSyncService.java +++ b/client-adapter/kudu/src/main/java/com/alibaba/otter/canal/client/adapter/kudu/service/KuduSyncService.java @@ -10,8 +10,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter.Feature; import com.alibaba.otter.canal.client.adapter.kudu.config.KuduMappingConfig; import com.alibaba.otter.canal.client.adapter.kudu.support.KuduTemplate; import com.alibaba.otter.canal.client.adapter.support.Dml; @@ -55,7 +55,7 @@ public void sync(KuduMappingConfig config, Dml dml) { delete(config, dml); } if (logger.isDebugEnabled()) { - logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + logger.debug("DML: {}", JSON.toJSONString(dml, Feature.WriteNulls)); } } } @@ -118,7 +118,7 @@ private void delete(KuduMappingConfig config, Dml dml) { } } catch (KuduException e) { logger.error(e.getMessage()); - logger.error("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + logger.error("DML: {}", JSON.toJSONString(dml, Feature.WriteNulls)); } } } @@ -159,7 +159,7 @@ private void 
upsert(KuduMappingConfig config, Dml dml) { } } catch (KuduException e) { logger.error(e.getMessage()); - logger.error("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + logger.error("DML: {}", JSON.toJSONString(dml, Feature.WriteNulls)); } } @@ -201,7 +201,7 @@ private void insert(KuduMappingConfig config, Dml dml) { } } catch (KuduException e) { logger.error(e.getMessage()); - logger.error("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + logger.error("DML: {}", JSON.toJSONString(dml, Feature.WriteNulls)); } } } diff --git a/client-adapter/launcher/pom.xml b/client-adapter/launcher/pom.xml index 1ff6f8d281..5b3f14ac79 100644 --- a/client-adapter/launcher/pom.xml +++ b/client-adapter/launcher/pom.xml @@ -3,7 +3,7 @@ canal.client-adapter com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml 4.0.0 @@ -17,7 +17,7 @@ org.springframework.boot spring-boot-dependencies - 2.0.1.RELEASE + 2.5.4 pom import @@ -61,7 +61,7 @@ postgresql - com.oracle + com.oracle.database.jdbc ojdbc6 @@ -128,6 +128,19 @@ jar-with-dependencies provided + + com.alibaba.otter + client-adapter.es8x + ${project.version} + + + * + * + + + jar-with-dependencies + provided + com.alibaba.otter client-adapter.rdb diff --git a/client-adapter/launcher/src/main/assembly/dev.xml b/client-adapter/launcher/src/main/assembly/dev.xml index fa84908a0b..beed58cfef 100644 --- a/client-adapter/launcher/src/main/assembly/dev.xml +++ b/client-adapter/launcher/src/main/assembly/dev.xml @@ -43,6 +43,13 @@ **/* + + ../es8x/src/main/resources/es8 + /conf/es8 + + **/* + + ../hbase/src/main/resources/hbase /conf/hbase diff --git a/client-adapter/launcher/src/main/assembly/release.xml b/client-adapter/launcher/src/main/assembly/release.xml index 920b563880..140ce36f9b 100644 --- a/client-adapter/launcher/src/main/assembly/release.xml +++ b/client-adapter/launcher/src/main/assembly/release.xml @@ -44,6 +44,13 @@ **/* + + ../es8x/src/main/resources/es8 + /conf/es8 + + 
**/* + + ../hbase/src/main/resources/hbase /conf/hbase diff --git a/client-adapter/launcher/src/main/bin/startup.sh b/client-adapter/launcher/src/main/bin/startup.sh index bd97b2fdb4..04e0e222e3 100644 --- a/client-adapter/launcher/src/main/bin/startup.sh +++ b/client-adapter/launcher/src/main/bin/startup.sh @@ -56,11 +56,27 @@ in exit;; esac +JavaVersion=`$JAVA -version 2>&1 |awk 'NR==1{ gsub(/"/,""); print $3 }' | awk -F '.' '{print $1}'` str=`file -L $JAVA | grep 64-bit` + +JAVA_OPTS="$JAVA_OPTS -Xss1m -XX:+AggressiveOpts -XX:-UseBiasedLocking -XX:-OmitStackTraceInFastThrow -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=$base/logs" +if [ $JavaVersion -ge 11 ] ; then + #JAVA_OPTS="$JAVA_OPTS -Xlog:gc*:$base_log/gc.log:time " + JAVA_OPTS="$JAVA_OPTS" +else + #JAVA_OPTS="$JAVA_OPTS -Xloggc:$base/logs/canal/gc.log -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCApplicationStoppedTime" + JAVA_OPTS="$JAVA_OPTS -XX:+UseFastAccessorMethods -XX:+PrintAdaptiveSizePolicy -XX:+PrintTenuringDistribution" +fi + if [ -n "$str" ]; then - JAVA_OPTS="-server -Xms2048m -Xmx3072m -Xmn1024m -XX:SurvivorRatio=2 -XX:PermSize=96m -XX:MaxPermSize=256m -Xss256k -XX:-UseAdaptiveSizePolicy -XX:MaxTenuringThreshold=15 -XX:+DisableExplicitGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+UseCMSCompactAtFullCollection -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:+HeapDumpOnOutOfMemoryError" + if [ $JavaVersion -ge 11 ] ; then + # For G1 + JAVA_OPTS="-server -Xms2g -Xmx3g -XX:+UseG1GC -XX:MaxGCPauseMillis=250 -XX:+UseGCOverheadLimit -XX:+ExplicitGCInvokesConcurrent $JAVA_OPTS" + else + JAVA_OPTS="-server -Xms2g -Xmx3g -Xmn1g -XX:SurvivorRatio=2 -XX:PermSize=96m -XX:MaxPermSize=256m -XX:MaxTenuringThreshold=15 -XX:+DisableExplicitGC $JAVA_OPTS" + fi else - JAVA_OPTS="-server -Xms1024m -Xmx1024m -XX:NewSize=256m -XX:MaxNewSize=256m -XX:MaxPermSize=128m " + JAVA_OPTS="-server -Xms1024m -Xmx1024m -XX:NewSize=256m -XX:MaxNewSize=256m -XX:MaxPermSize=128m 
$JAVA_OPTS" fi JAVA_OPTS=" $JAVA_OPTS -Djava.awt.headless=true -Djava.net.preferIPv4Stack=true -Dfile.encoding=UTF-8" diff --git a/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/CanalAdapterApplication.java b/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/CanalAdapterApplication.java index 64cab2e5d0..2e3501dd31 100644 --- a/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/CanalAdapterApplication.java +++ b/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/CanalAdapterApplication.java @@ -3,6 +3,7 @@ import org.springframework.boot.Banner; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; /** * 启动入口 @@ -10,9 +11,11 @@ * @author rewerma @ 2018-10-20 * @version 1.0.0 */ -@SpringBootApplication +@SpringBootApplication(exclude= { DataSourceAutoConfiguration.class}) public class CanalAdapterApplication { public static void main(String[] args) { + // 支持rocketmq client 配置日志路径 + System.setProperty("rocketmq.client.logUseSlf4j","true"); SpringApplication application = new SpringApplication(CanalAdapterApplication.class); application.setBannerMode(Banner.Mode.OFF); diff --git a/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/AdapterProcessor.java b/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/AdapterProcessor.java index 5fa812fb1d..30a267da98 100644 --- a/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/AdapterProcessor.java +++ b/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/AdapterProcessor.java @@ -7,7 +7,6 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import 
java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -23,36 +22,37 @@ import com.alibaba.otter.canal.connector.core.consumer.CommonMessage; import com.alibaba.otter.canal.connector.core.spi.CanalMsgConsumer; import com.alibaba.otter.canal.connector.core.spi.ExtensionLoader; +import com.alibaba.otter.canal.connector.core.spi.ProxyCanalMsgConsumer; /** * 适配处理器 - * + * * @author rewerma 2020-02-01 * @version 1.0.0 */ public class AdapterProcessor { - private static final Logger logger = LoggerFactory.getLogger(AdapterProcessor.class); + private static final Logger logger = LoggerFactory.getLogger(AdapterProcessor.class); - private static final String CONNECTOR_SPI_DIR = "/plugin"; - private static final String CONNECTOR_STANDBY_SPI_DIR = "/canal-adapter/plugin"; + private static final String CONNECTOR_SPI_DIR = "/plugin"; + private static final String CONNECTOR_STANDBY_SPI_DIR = "/canal-adapter/plugin"; - private CanalMsgConsumer canalMsgConsumer; + private CanalMsgConsumer canalMsgConsumer; - private String canalDestination; // canal实例 - private String groupId = null; // groupId - private List> canalOuterAdapters; // 外部适配器 - private CanalClientConfig canalClientConfig; // 配置 - private ExecutorService groupInnerExecutorService; // 组内工作线程池 - private volatile boolean running = false; // 是否运行中 - private Thread thread = null; - private Thread.UncaughtExceptionHandler handler = (t, e) -> logger + private String canalDestination; // canal实例 + private String groupId = null; // groupId + private List> canalOuterAdapters; // 外部适配器 + private CanalClientConfig canalClientConfig; // 配置 + private ExecutorService groupInnerExecutorService; // 组内工作线程池 + private volatile boolean running = false; // 是否运行中 + private Thread thread = null; + private Thread.UncaughtExceptionHandler handler = (t, e) -> logger .error("parse events has an error", e); - private SyncSwitch syncSwitch; + private SyncSwitch syncSwitch; public AdapterProcessor(CanalClientConfig 
canalClientConfig, String destination, String groupId, - List> canalOuterAdapters){ + List> canalOuterAdapters) { this.canalClientConfig = canalClientConfig; this.canalDestination = destination; this.groupId = groupId; @@ -63,17 +63,15 @@ public AdapterProcessor(CanalClientConfig canalClientConfig, String destination, // load connector consumer ExtensionLoader loader = new ExtensionLoader<>(CanalMsgConsumer.class); - canalMsgConsumer = loader - .getExtension(canalClientConfig.getMode().toLowerCase(),destination ,CONNECTOR_SPI_DIR, CONNECTOR_STANDBY_SPI_DIR); + canalMsgConsumer = new ProxyCanalMsgConsumer(loader + .getExtension(canalClientConfig.getMode().toLowerCase(), destination, CONNECTOR_SPI_DIR, + CONNECTOR_STANDBY_SPI_DIR)); Properties properties = canalClientConfig.getConsumerProperties(); properties.put(CanalConstants.CANAL_MQ_FLAT_MESSAGE, canalClientConfig.getFlatMessage()); properties.put(CanalConstants.CANAL_ALIYUN_ACCESS_KEY, canalClientConfig.getAccessKey()); properties.put(CanalConstants.CANAL_ALIYUN_SECRET_KEY, canalClientConfig.getSecretKey()); - ClassLoader cl = Thread.currentThread().getContextClassLoader(); - Thread.currentThread().setContextClassLoader(canalMsgConsumer.getClass().getClassLoader()); canalMsgConsumer.init(properties, canalDestination, groupId); - Thread.currentThread().setContextClassLoader(cl); } public void start() { @@ -88,6 +86,7 @@ public void start() { public void writeOut(final List commonMessages) { List> futures = new ArrayList<>(); // 组间适配器并行运行 + // 当 canalOuterAdapters 初始化失败时,消息将会全部丢失 canalOuterAdapters.forEach(outerAdapters -> { futures.add(groupInnerExecutorService.submit(() -> { try { @@ -171,7 +170,7 @@ private void process() { } int retry = canalClientConfig.getRetries() == null - || canalClientConfig.getRetries() == 0 ? 1 : canalClientConfig.getRetries(); + || canalClientConfig.getRetries() == 0 ? 
1 : canalClientConfig.getRetries(); if (retry == -1) { // 重试次数-1代表异常时一直阻塞重试 retry = Integer.MAX_VALUE; diff --git a/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterLoader.java b/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterLoader.java index 20e3741483..9f0819d4a7 100644 --- a/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterLoader.java +++ b/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/loader/CanalAdapterLoader.java @@ -16,12 +16,15 @@ import org.springframework.core.env.Environment; import org.springframework.core.env.PropertySource; import org.springframework.core.env.StandardEnvironment; +import org.springframework.util.CollectionUtils; import com.alibaba.otter.canal.adapter.launcher.config.SpringContext; import com.alibaba.otter.canal.client.adapter.OuterAdapter; +import com.alibaba.otter.canal.client.adapter.ProxyOuterAdapter; import com.alibaba.otter.canal.client.adapter.support.CanalClientConfig; import com.alibaba.otter.canal.client.adapter.support.ExtensionLoader; import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig; +import com.alibaba.otter.canal.client.adapter.support.Util; /** * 外部适配器的加载器 @@ -50,162 +53,49 @@ public void init() { for (CanalClientConfig.CanalAdapter canalAdapter : canalClientConfig.getCanalAdapters()) { for (CanalClientConfig.Group group : canalAdapter.getGroups()) { + int autoGenId = 0; List> canalOuterAdapterGroups = new CopyOnWriteArrayList<>(); List canalOuterAdapters = new CopyOnWriteArrayList<>(); + for (OuterAdapterConfig config : group.getOuterAdapters()) { + // 保证一定有key + if (StringUtils.isEmpty(config.getKey())) { + String key = StringUtils.join( + new String[] { Util.AUTO_GENERATED_PREFIX, canalAdapter.getInstance(), group.getGroupId(), + String.valueOf(autoGenId) }, + '-'); + //gen keyId + config.setKey(key); + } + 
autoGenId++; loadAdapter(config, canalOuterAdapters); } canalOuterAdapterGroups.add(canalOuterAdapters); - - AdapterProcessor adapterProcessor = canalAdapterProcessors.computeIfAbsent(canalAdapter.getInstance() - + "|" - + StringUtils.trimToEmpty(group.getGroupId()), + // canalOuterAdapters 存在初始化失败的情况,导致canalOuterAdapters的数量,可能小于group.getOuterAdapters + // 由于group下的 所有OuterAdapter实例都会重复消费同一批消息,因此不允许部分adapter初始化成功,必须全部初始化成功才允许消费 + if(CollectionUtils.isEmpty(canalOuterAdapters) || canalOuterAdapters.size() != group.getOuterAdapters().size() ){ + String msg = String.format("instance=%s,groupId=%s Load OuterAdapters is Empty,pls check rdb.yml", + canalAdapter.getInstance(),group.getGroupId()); + throw new RuntimeException(msg); + } + AdapterProcessor adapterProcessor = canalAdapterProcessors.computeIfAbsent( + canalAdapter.getInstance() + "|" + StringUtils.trimToEmpty(group.getGroupId()), f -> new AdapterProcessor(canalClientConfig, canalAdapter.getInstance(), group.getGroupId(), canalOuterAdapterGroups)); adapterProcessor.start(); - - logger.info("Start adapter for canal-client mq topic: {} succeed", canalAdapter.getInstance() + "-" - + group.getGroupId()); + logger.info("Start adapter for canal-client mq topic: {} succeed", + canalAdapter.getInstance() + "-" + group.getGroupId()); } } - - // if ("tcp".equalsIgnoreCase(canalClientConfig.getMode())) { - // // 初始化canal-client的适配器 - // for (CanalClientConfig.CanalAdapter canalAdapter : - // canalClientConfig.getCanalAdapters()) { - // List> canalOuterAdapterGroups = new - // CopyOnWriteArrayList<>(); - // - // for (CanalClientConfig.Group connectorGroup : - // canalAdapter.getGroups()) { - // List canalOutConnectors = new CopyOnWriteArrayList<>(); - // for (OuterAdapterConfig c : connectorGroup.getOuterAdapters()) { - // loadAdapter(c, canalOutConnectors); - // } - // canalOuterAdapterGroups.add(canalOutConnectors); - // } - // CanalAdapterWorker worker; - // if (StringUtils.isNotEmpty(canalServerHost)) { - // worker = 
new CanalAdapterWorker(canalClientConfig, - // canalAdapter.getInstance(), - // canalServerHost, - // zkHosts, - // canalOuterAdapterGroups); - // } else if (zkHosts != null) { - // worker = new CanalAdapterWorker(canalClientConfig, - // canalAdapter.getInstance(), - // zkHosts, - // canalOuterAdapterGroups); - // } else { - // throw new RuntimeException("No canal server connector found"); - // } - // canalWorkers.put(canalAdapter.getInstance(), worker); - // worker.start(); - // logger.info("Start adapter for canal instance: {} succeed", - // canalAdapter.getInstance()); - // } - // } else if ("kafka".equalsIgnoreCase(canalClientConfig.getMode())) { - // // 初始化canal-client-kafka的适配器 - // for (CanalClientConfig.CanalAdapter canalAdapter : - // canalClientConfig.getCanalAdapters()) { - // for (CanalClientConfig.Group group : canalAdapter.getGroups()) { - // List> canalOuterAdapterGroups = new - // CopyOnWriteArrayList<>(); - // List canalOuterAdapters = new CopyOnWriteArrayList<>(); - // for (OuterAdapterConfig config : group.getOuterAdapters()) { - // loadAdapter(config, canalOuterAdapters); - // } - // canalOuterAdapterGroups.add(canalOuterAdapters); - // - // CanalAdapterKafkaWorker canalKafkaWorker = new - // CanalAdapterKafkaWorker(canalClientConfig, - // canalClientConfig.getMqServers(), - // canalAdapter.getInstance(), - // group.getGroupId(), - // canalOuterAdapterGroups, - // canalClientConfig.getFlatMessage()); - // canalMQWorker.put(canalAdapter.getInstance() + "-kafka-" + - // group.getGroupId(), canalKafkaWorker); - // canalKafkaWorker.start(); - // logger.info("Start adapter for canal-client mq topic: {} succeed", - // canalAdapter.getInstance() + "-" + group.getGroupId()); - // } - // } - // } else if ("rocketMQ".equalsIgnoreCase(canalClientConfig.getMode())) - // { - // // 初始化canal-client-rocketMQ的适配器 - // for (CanalClientConfig.CanalAdapter canalAdapter : - // canalClientConfig.getCanalAdapters()) { - // for (CanalClientConfig.Group group : 
canalAdapter.getGroups()) { - // List> canalOuterAdapterGroups = new - // CopyOnWriteArrayList<>(); - // List canalOuterAdapters = new CopyOnWriteArrayList<>(); - // for (OuterAdapterConfig config : group.getOuterAdapters()) { - // loadAdapter(config, canalOuterAdapters); - // } - // canalOuterAdapterGroups.add(canalOuterAdapters); - // CanalAdapterRocketMQWorker rocketMQWorker = new - // CanalAdapterRocketMQWorker(canalClientConfig, - // canalClientConfig.getMqServers(), - // canalAdapter.getInstance(), - // group.getGroupId(), - // canalOuterAdapterGroups, - // canalClientConfig.getAccessKey(), - // canalClientConfig.getSecretKey(), - // canalClientConfig.getFlatMessage(), - // canalClientConfig.isEnableMessageTrace(), - // canalClientConfig.getCustomizedTraceTopic(), - // canalClientConfig.getAccessChannel(), - // canalClientConfig.getNamespace()); - // canalMQWorker.put(canalAdapter.getInstance() + "-rocketmq-" + - // group.getGroupId(), rocketMQWorker); - // rocketMQWorker.start(); - // - // logger.info("Start adapter for canal-client mq topic: {} succeed", - // canalAdapter.getInstance() + "-" + group.getGroupId()); - // } - // } - // } else if ("rabbitMQ".equalsIgnoreCase(canalClientConfig.getMode())) - // { - // // 初始化canal-client-rabbitMQ的适配器 - // for (CanalClientConfig.CanalAdapter canalAdapter : - // canalClientConfig.getCanalAdapters()) { - // for (CanalClientConfig.Group group : canalAdapter.getGroups()) { - // List> canalOuterAdapterGroups = new - // CopyOnWriteArrayList<>(); - // List canalOuterAdapters = new CopyOnWriteArrayList<>(); - // for (OuterAdapterConfig config : group.getOuterAdapters()) { - // loadAdapter(config, canalOuterAdapters); - // } - // canalOuterAdapterGroups.add(canalOuterAdapters); - // CanalAdapterRabbitMQWorker rabbitMQWork = new - // CanalAdapterRabbitMQWorker(canalClientConfig, - // canalOuterAdapterGroups, - // canalAdapter.getInstance(), - // group.getGroupId(), - // canalClientConfig.getFlatMessage()); - // 
canalMQWorker.put(canalAdapter.getInstance() + "-rabbitmq-" + - // group.getGroupId(), rabbitMQWork); - // rabbitMQWork.start(); - // - // logger.info("Start adapter for canal-client mq topic: {} succeed", - // canalAdapter.getInstance() + "-" + group.getGroupId()); - // } - // } - // // CanalAdapterRabbitMQWork - // } } private void loadAdapter(OuterAdapterConfig config, List canalOutConnectors) { try { OuterAdapter adapter; - adapter = loader.getExtension(config.getName(), StringUtils.trimToEmpty(config.getKey())); + adapter = new ProxyOuterAdapter(loader.getExtension(config.getName(), config.getKey())); - ClassLoader cl = Thread.currentThread().getContextClassLoader(); - // 替换ClassLoader - Thread.currentThread().setContextClassLoader(adapter.getClass().getClassLoader()); Environment env = (Environment) SpringContext.getBean(Environment.class); Properties evnProperties = null; if (env instanceof StandardEnvironment) { @@ -223,7 +113,7 @@ private void loadAdapter(OuterAdapterConfig config, List canalOutC } } adapter.init(config, evnProperties); - Thread.currentThread().setContextClassLoader(cl); + // rdb文件解析异常时,canalOuterAdapters 无法正常加载 canalOutConnectors.add(adapter); logger.info("Load canal adapter: {} succeed", config.getName()); } catch (Exception e) { diff --git a/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/rest/CommonRest.java b/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/rest/CommonRest.java index 32b39f4794..291504876c 100644 --- a/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/rest/CommonRest.java +++ b/client-adapter/launcher/src/main/java/com/alibaba/otter/canal/adapter/launcher/rest/CommonRest.java @@ -1,15 +1,21 @@ package com.alibaba.otter.canal.adapter.launcher.rest; +import com.alibaba.otter.canal.adapter.launcher.common.EtlLock; +import com.alibaba.otter.canal.adapter.launcher.common.SyncSwitch; +import 
com.alibaba.otter.canal.adapter.launcher.config.AdapterCanalConfig; +import com.alibaba.otter.canal.client.adapter.OuterAdapter; +import com.alibaba.otter.canal.client.adapter.support.EtlResult; +import com.alibaba.otter.canal.client.adapter.support.ExtensionLoader; +import com.alibaba.otter.canal.client.adapter.support.FileName2KeyMapping; +import com.alibaba.otter.canal.client.adapter.support.Result; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; - import javax.annotation.PostConstruct; import javax.annotation.Resource; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.web.bind.annotation.GetMapping; @@ -19,14 +25,6 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import com.alibaba.otter.canal.adapter.launcher.common.EtlLock; -import com.alibaba.otter.canal.adapter.launcher.common.SyncSwitch; -import com.alibaba.otter.canal.adapter.launcher.config.AdapterCanalConfig; -import com.alibaba.otter.canal.client.adapter.OuterAdapter; -import com.alibaba.otter.canal.client.adapter.support.EtlResult; -import com.alibaba.otter.canal.client.adapter.support.ExtensionLoader; -import com.alibaba.otter.canal.client.adapter.support.Result; - /** * 适配器操作Rest * @@ -66,6 +64,9 @@ public void init() { @PostMapping("/etl/{type}/{key}/{task}") public EtlResult etl(@PathVariable String type, @PathVariable String key, @PathVariable String task, @RequestParam(name = "params", required = false) String params) { + if (key == null) { + key = FileName2KeyMapping.getKey(type, task); + } OuterAdapter adapter = loader.getExtension(type, key); String destination = adapter.getDestination(task); String lockKey = destination == null ? 
task : destination; @@ -133,6 +134,9 @@ public EtlResult etl(@PathVariable String type, @PathVariable String task, */ @GetMapping("/count/{type}/{key}/{task}") public Map count(@PathVariable String type, @PathVariable String key, @PathVariable String task) { + if (key == null) { + key = FileName2KeyMapping.getKey(type, task); + } OuterAdapter adapter = loader.getExtension(type, key); return adapter.count(task); } diff --git a/client-adapter/launcher/src/main/resources/application.yml b/client-adapter/launcher/src/main/resources/application.yml index 889b401abb..5b8317ccf7 100644 --- a/client-adapter/launcher/src/main/resources/application.yml +++ b/client-adapter/launcher/src/main/resources/application.yml @@ -11,7 +11,7 @@ canal.conf: flatMessage: true zookeeperHosts: syncBatchSize: 1000 - retries: 0 + retries: -1 timeout: accessKey: secretKey: @@ -64,6 +64,8 @@ canal.conf: # jdbc.url: jdbc:mysql://127.0.0.1:3306/mytest2?useUnicode=true # jdbc.username: root # jdbc.password: 121212 +# druid.stat.enable: false +# druid.stat.slowSqlMillis: 1000 # - name: rdb # key: oracle1 # properties: @@ -91,14 +93,14 @@ canal.conf: # mode: transport # or rest # # security.auth: test:123456 # only used for rest mode # cluster.name: elasticsearch -# - name: kudu -# key: kudu -# properties: -# kudu.master.address: 127.0.0.1 # ',' split multi address -# - name: phoenix -# key: phoenix -# properties: -# jdbc.driverClassName: org.apache.phoenix.jdbc.PhoenixDriver -# jdbc.url: jdbc:phoenix:127.0.0.1:2181:/hbase/db -# jdbc.username: -# jdbc.password: \ No newline at end of file +# - name: kudu +# key: kudu +# properties: +# kudu.master.address: 127.0.0.1 # ',' split multi address +# - name: phoenix +# key: phoenix +# properties: +# jdbc.driverClassName: org.apache.phoenix.jdbc.PhoenixDriver +# jdbc.url: jdbc:phoenix:127.0.0.1:2181:/hbase/db +# jdbc.username: +# jdbc.password: diff --git a/client-adapter/launcher/src/main/resources/bootstrap.yml 
b/client-adapter/launcher/src/main/resources/bootstrap.yml index c36f305fb2..7ce2188cab 100644 --- a/client-adapter/launcher/src/main/resources/bootstrap.yml +++ b/client-adapter/launcher/src/main/resources/bootstrap.yml @@ -1,6 +1,6 @@ -#canal: -# manager: -# jdbc: -# url: jdbc:mysql://127.0.0.1:3306/canal_manager?useUnicode=true&characterEncoding=UTF-8 -# username: root -# password: 121212 \ No newline at end of file +canal: + manager: + jdbc: + url: jdbc:mysql://127.0.0.1:3306/canal_manager?useUnicode=true&characterEncoding=UTF-8 + username: canal + password: canal diff --git a/client-adapter/launcher/src/main/resources/logback.xml b/client-adapter/launcher/src/main/resources/logback.xml index 4092e22170..93c3541512 100644 --- a/client-adapter/launcher/src/main/resources/logback.xml +++ b/client-adapter/launcher/src/main/resources/logback.xml @@ -34,6 +34,23 @@ + + ../logs/adapter/rocketmq_client.log + + + ../logs/adapter/%d{yyyy-MM-dd}/rocketmq_client-%d{yyyy-MM-dd}-%i.log.gz + + + 512MB + + 60 + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{56} - %msg%n + + + @@ -54,6 +71,10 @@ + + + + diff --git a/client-adapter/logger/pom.xml b/client-adapter/logger/pom.xml index 09f63ab125..dd01c41f09 100644 --- a/client-adapter/logger/pom.xml +++ b/client-adapter/logger/pom.xml @@ -3,7 +3,7 @@ canal.client-adapter com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml 4.0.0 @@ -20,7 +20,6 @@ - diff --git a/client-adapter/logger/src/main/java/com/alibaba/otter/canal/client/adapter/logger/LoggerAdapterExample.java b/client-adapter/logger/src/main/java/com/alibaba/otter/canal/client/adapter/logger/LoggerAdapterExample.java index 8a195d6533..c4526b2677 100644 --- a/client-adapter/logger/src/main/java/com/alibaba/otter/canal/client/adapter/logger/LoggerAdapterExample.java +++ b/client-adapter/logger/src/main/java/com/alibaba/otter/canal/client/adapter/logger/LoggerAdapterExample.java @@ -6,8 +6,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; 
-import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter.Feature; import com.alibaba.otter.canal.client.adapter.OuterAdapter; import com.alibaba.otter.canal.client.adapter.support.Dml; import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig; @@ -36,7 +36,7 @@ public void sync(List dmls) { } public void sync(Dml dml) { - logger.info("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + logger.info("DML: {}", JSON.toJSONString(dml, Feature.WriteNulls)); } @Override diff --git a/client-adapter/phoenix/pom.xml b/client-adapter/phoenix/pom.xml index bfaf89eac2..41af5949c3 100644 --- a/client-adapter/phoenix/pom.xml +++ b/client-adapter/phoenix/pom.xml @@ -1,11 +1,9 @@ - + canal.client-adapter com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml 4.0.0 @@ -19,13 +17,12 @@ org.apache.phoenix phoenix-core - 4.14.1-HBase-1.4 org.apache.hbase hbase-client - 1.4.8 + com.google.protobuf protobuf-java @@ -41,50 +38,41 @@ org.slf4j slf4j-api - 1.7.12 provided commons-lang commons-lang - 2.6 provided commons-io commons-io - 2.4 provided com.google.guava guava - 18.0 provided com.alibaba druid - 1.1.9 provided junit junit - 4.12 test - com.alibaba - fastjson - 1.2.58 + com.alibaba.fastjson2 + fastjson2 provided - mysql mysql-connector-java - 5.1.48 provided @@ -123,19 +111,19 @@ - + - + - + diff --git a/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/PhoenixAdapter.java b/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/PhoenixAdapter.java index 218d9128d2..b67623d0ac 100644 --- a/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/PhoenixAdapter.java +++ b/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/PhoenixAdapter.java @@ -43,7 +43,7 @@ public class PhoenixAdapter implements 
OuterAdapter { private Properties envProperties; - + private OuterAdapterConfig configuration; public Map getPhoenixMapping() { return phoenixMapping; @@ -64,14 +64,11 @@ public PhoenixAdapter() { @Override public void init(OuterAdapterConfig configuration, Properties envProperties) { this.envProperties = envProperties; + this.configuration = configuration; Map phoenixMappingTmp = ConfigLoader.load(envProperties); // 过滤不匹配的key的配置 - phoenixMappingTmp.forEach((key, mappingConfig) -> { - if ((mappingConfig.getOuterAdapterKey() == null && configuration.getKey() == null) - || (mappingConfig.getOuterAdapterKey() != null - && mappingConfig.getOuterAdapterKey().equalsIgnoreCase(configuration.getKey()))) { - phoenixMapping.put(key, mappingConfig); - } + phoenixMappingTmp.forEach((key, config) -> { + addConfig(key, config); }); if (phoenixMapping.isEmpty()) { @@ -80,24 +77,6 @@ public void init(OuterAdapterConfig configuration, Properties envProperties) { logger.info("[{}]phoenix config mapping: {}", this, phoenixMapping.keySet()); } - for (Map.Entry entry : phoenixMapping.entrySet()) { - String configName = entry.getKey(); - MappingConfig mappingConfig = entry.getValue(); - String key; - if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { - key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-" - + StringUtils.trimToEmpty(mappingConfig.getGroupId()) + "_" - + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable().toLowerCase(); - } else { - key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_" - + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable().toLowerCase(); - } - Map configMap = mappingConfigCache.computeIfAbsent(key, - k1 -> new ConcurrentHashMap<>()); - configMap.put(configName, mappingConfig); - } - - Map properties = configuration.getProperties(); DriverClass= properties.get("jdbc.driverClassName"); @@ -288,4 
+267,60 @@ public void destroy() { phoenixSyncService.close(); } } + + private void addSyncConfigToCache(String configName, MappingConfig mappingConfig) { + String key; + if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { + key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-" + + StringUtils.trimToEmpty(mappingConfig.getGroupId()) + "_" + + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable().toLowerCase(); + } else { + key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_" + + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable().toLowerCase(); + } + Map configMap = mappingConfigCache.computeIfAbsent(key, + k1 -> new ConcurrentHashMap<>()); + configMap.put(configName, mappingConfig); + } + + public boolean addConfig(String fileName, MappingConfig config) { + if (match(config)) { + phoenixMapping.put(fileName, config); + addSyncConfigToCache(fileName, config); + FileName2KeyMapping.register(getClass().getAnnotation(SPI.class).value(), fileName, + configuration.getKey()); + return true; + } + return false; + } + + public void updateConfig(String fileName, MappingConfig config) { + if (config.getOuterAdapterKey() != null && !config.getOuterAdapterKey() + .equals(configuration.getKey())) { + // 理论上不允许改这个 因为本身就是通过这个关联起Adapter和Config的 + throw new RuntimeException("not allow to change outAdapterKey"); + } + phoenixMapping.put(fileName, config); + addSyncConfigToCache(fileName, config); + } + + public void deleteConfig(String fileName) { + phoenixMapping.remove(fileName); + for (Map configMap : mappingConfigCache.values()) { + if (configMap != null) { + configMap.remove(fileName); + } + } + FileName2KeyMapping.unregister(getClass().getAnnotation(SPI.class).value(), fileName); + } + + private boolean match(MappingConfig config) { + boolean sameMatch = config.getOuterAdapterKey() != null && config.getOuterAdapterKey() + 
.equalsIgnoreCase(configuration.getKey()); + boolean prefixMatch = config.getOuterAdapterKey() == null && configuration.getKey() + .startsWith(StringUtils + .join(new String[]{Util.AUTO_GENERATED_PREFIX, config.getDestination(), + config.getGroupId()}, '-')); + return sameMatch || prefixMatch; + } } diff --git a/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/config/ConfigLoader.java b/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/config/ConfigLoader.java index f5c753b32a..0e60e7c47e 100644 --- a/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/config/ConfigLoader.java +++ b/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/config/ConfigLoader.java @@ -1,7 +1,7 @@ package com.alibaba.otter.canal.client.adapter.phoenix.config; -import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder; import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; +import com.alibaba.otter.canal.client.adapter.support.YamlUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,8 +28,7 @@ public static Map load(Properties envProperties) { Map configContentMap = MappingConfigsLoader.loadConfigs("phoenix"); configContentMap.forEach((fileName, content) -> { - MappingConfig config = YmlConfigBinder - .bindYmlToObj(null, content, MappingConfig.class, null, envProperties); + MappingConfig config = YamlUtils.ymlToObj(null, content, MappingConfig.class, null, envProperties); if (config == null) { return; } diff --git a/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/monitor/PhoenixConfigMonitor.java b/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/monitor/PhoenixConfigMonitor.java index 0c78491864..8f9f12714e 100644 --- a/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/monitor/PhoenixConfigMonitor.java +++ 
b/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/monitor/PhoenixConfigMonitor.java @@ -2,36 +2,32 @@ import com.alibaba.otter.canal.client.adapter.phoenix.PhoenixAdapter; import com.alibaba.otter.canal.client.adapter.phoenix.config.MappingConfig; -import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder; import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; import com.alibaba.otter.canal.client.adapter.support.Util; +import com.alibaba.otter.canal.client.adapter.support.YamlUtils; +import java.io.File; +import java.util.Properties; import org.apache.commons.io.filefilter.FileFilterUtils; import org.apache.commons.io.monitor.FileAlterationListenerAdaptor; import org.apache.commons.io.monitor.FileAlterationMonitor; import org.apache.commons.io.monitor.FileAlterationObserver; -import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - /** * phoenix config monitor */ public class PhoenixConfigMonitor { - private static final Logger logger = LoggerFactory.getLogger(PhoenixConfigMonitor.class); + private static final Logger logger = LoggerFactory.getLogger(PhoenixConfigMonitor.class); - private static final String adapterName = "phoenix"; //相应组件名字 + private static final String adapterName = "phoenix"; // 相应组件名字 - private String key; + private String key; - private PhoenixAdapter phoenixAdapter; //相应适配器名实现类 + private PhoenixAdapter phoenixAdapter; // 相应适配器名实现类 - private Properties envProperties; + private Properties envProperties; private FileAlterationMonitor fileMonitor; @@ -42,7 +38,7 @@ public void init(String key, PhoenixAdapter phoenixAdapter, Properties envProper File confDir = Util.getConfDirPath(adapterName); try { FileAlterationObserver observer = new FileAlterationObserver(confDir, - FileFilterUtils.and(FileFilterUtils.fileFileFilter(), 
FileFilterUtils.suffixFileFilter("yml"))); + FileFilterUtils.and(FileFilterUtils.fileFileFilter(), FileFilterUtils.suffixFileFilter("yml"))); FileListener listener = new FileListener(); observer.addListener(listener); fileMonitor = new FileAlterationMonitor(3000, observer); @@ -69,16 +65,14 @@ public void onFileCreate(File file) { try { // 加载新增的配置文件 String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName()); - MappingConfig config = YmlConfigBinder - .bindYmlToObj(null, configContent, MappingConfig.class, null, envProperties); + MappingConfig config = YamlUtils + .ymlToObj(null, configContent, MappingConfig.class, null, envProperties); if (config == null) { return; } config.validate(); - if ((key == null && config.getOuterAdapterKey() == null) - || (key != null && key.equals(config.getOuterAdapterKey()))) { - addConfigToCache(file, config); - + boolean result = phoenixAdapter.addConfig(file.getName(), config); + if (result) { logger.info("Add a new phoenix mapping config: {} to canal adapter", file.getName()); } } catch (Exception e) { @@ -94,27 +88,18 @@ public void onFileChange(File file) { if (phoenixAdapter.getPhoenixMapping().containsKey(file.getName())) { // 加载配置文件 String configContent = MappingConfigsLoader - .loadConfig(adapterName + File.separator + file.getName()); + .loadConfig(adapterName + File.separator + file.getName()); if (configContent == null) { onFileDelete(file); return; } - MappingConfig config = YmlConfigBinder - .bindYmlToObj(null, configContent, MappingConfig.class, null, envProperties); + MappingConfig config = YamlUtils + .ymlToObj(null, configContent, MappingConfig.class, null, envProperties); if (config == null) { return; } config.validate(); - if ((key == null && config.getOuterAdapterKey() == null) - || (key != null && key.equals(config.getOuterAdapterKey()))) { - if (phoenixAdapter.getPhoenixMapping().containsKey(file.getName())) { - deleteConfigFromCache(file); - } - addConfigToCache(file, 
config); - } else { - // 不能修改outerAdapterKey - throw new RuntimeException("Outer adapter key not allowed modify"); - } + phoenixAdapter.updateConfig(file.getName(), config); logger.info("Change a phoenix mapping config: {} of canal adapter", file.getName()); } } catch (Exception e) { @@ -128,40 +113,12 @@ public void onFileDelete(File file) { try { if (phoenixAdapter.getPhoenixMapping().containsKey(file.getName())) { - deleteConfigFromCache(file); - + phoenixAdapter.deleteConfig(file.getName()); logger.info("Delete a phoenix mapping config: {} of canal adapter", file.getName()); } } catch (Exception e) { logger.error(e.getMessage(), e); } } - - private void addConfigToCache(File file, MappingConfig mappingConfig) { - if (mappingConfig == null || mappingConfig.getDbMapping() == null) { - return; - } - phoenixAdapter.getPhoenixMapping().put(file.getName(), mappingConfig); - Map configMap = phoenixAdapter.getMappingConfigCache() - .computeIfAbsent(StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_" - + mappingConfig.getDbMapping().getDatabase() + "-" - + mappingConfig.getDbMapping().getTable().toLowerCase(), - k1 -> new HashMap<>()); - configMap.put(file.getName(), mappingConfig); - } - - private void deleteConfigFromCache(File file) { - logger.info("deleteConfigFromCache: {}", file.getName()); - MappingConfig mappingConfig = phoenixAdapter.getPhoenixMapping().remove(file.getName()); - - if (mappingConfig == null || mappingConfig.getDbMapping() == null) { - return; - } - for (Map configMap : phoenixAdapter.getMappingConfigCache().values()) { - if (configMap != null) { - configMap.remove(file.getName()); - } - } - } } } diff --git a/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/service/PhoenixEtlService.java b/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/service/PhoenixEtlService.java index 23fab256c7..06cfb45e2e 100644 --- 
a/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/service/PhoenixEtlService.java +++ b/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/service/PhoenixEtlService.java @@ -1,5 +1,17 @@ package com.alibaba.otter.canal.client.adapter.phoenix.service; +import java.sql.*; +import java.util.*; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; + +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.alibaba.otter.canal.client.adapter.phoenix.PhoenixAdapter; import com.alibaba.otter.canal.client.adapter.phoenix.config.MappingConfig; import com.alibaba.otter.canal.client.adapter.phoenix.config.MappingConfig.DbMapping; @@ -10,16 +22,6 @@ import com.alibaba.otter.canal.client.adapter.support.EtlResult; import com.alibaba.otter.canal.client.adapter.support.Util; import com.google.common.base.Joiner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.sql.DataSource; -import java.sql.*; -import java.util.*; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; /** * Phoenix ETL 操作业务类 @@ -41,16 +43,17 @@ static boolean syncSchema(Connection targetDSConnection, MappingConfig config) { if (srcDataSource == null) { return false; } - try { - return syncSchema(srcDataSource.getConnection(), targetDSConnection, config); + + try (Connection conn = srcDataSource.getConnection()) { + return syncSchema(conn, targetDSConnection, config); } catch (SQLException e) { throw new RuntimeException(e); } } private static boolean syncSchema(DataSource srcDS,Connection targetDSConnection, MappingConfig config) { - try { - return syncSchema(srcDS.getConnection(),targetDSConnection, config); + try 
(Connection conn = srcDS.getConnection()) { + return syncSchema(conn, targetDSConnection, config); } catch (SQLException e) { throw new RuntimeException(e); } diff --git a/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/service/PhoenixSyncService.java b/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/service/PhoenixSyncService.java index cdfd63f11c..11d7c3296f 100644 --- a/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/service/PhoenixSyncService.java +++ b/client-adapter/phoenix/src/main/java/com/alibaba/otter/canal/client/adapter/phoenix/service/PhoenixSyncService.java @@ -6,8 +6,8 @@ import com.alibaba.druid.sql.ast.statement.*; import com.alibaba.druid.sql.parser.ParserException; import com.alibaba.druid.util.JdbcConstants; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter.Feature; import com.alibaba.otter.canal.client.adapter.phoenix.config.ConfigurationManager; import com.alibaba.otter.canal.client.adapter.phoenix.config.MappingConfig; import com.alibaba.otter.canal.client.adapter.phoenix.config.MappingConfig.DbMapping; @@ -208,7 +208,7 @@ public void sync(Map> mappingConfig, List entry : configMap.entrySet()) { MappingConfig config = entry.getValue(); if (config.isDebug()) { - logger.info("DML: {} {}", entry.getKey(), JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + logger.info("DML: {} {}", entry.getKey(), JSON.toJSONString(dml, Feature.WriteNulls)); } if (config.getConcurrent()) { //并行同步 @@ -257,7 +257,7 @@ private void sync(BatchExecutor batchExecutor, MappingConfig config, SingleDml d } else if (type != null && type.equalsIgnoreCase("TRUNCATE")) { truncate(batchExecutor, config); } else if (logger.isInfoEnabled()){ - logger.info("SingleDml: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + 
logger.info("SingleDml: {}", JSON.toJSONString(dml, Feature.WriteNulls)); } } catch (SQLException e) { logger.error("sync error: " + e.getMessage(), e); @@ -268,7 +268,7 @@ private void sync(BatchExecutor batchExecutor, MappingConfig config, SingleDml d private void alter(BatchExecutor batchExecutor, MappingConfig config, Dml dml, List stmtList, String configFile) throws SQLException { if (config.isDebug()) { - logger.info("DML: {} {}", configFile, JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + logger.info("DML: {} {}", configFile, JSON.toJSONString(dml, Feature.WriteNulls)); } DbMapping dbMapping = config.getDbMapping(); if (!dbMapping.isAlter()) { diff --git a/client-adapter/pom.xml b/client-adapter/pom.xml index 48644afffc..d8630e3084 100644 --- a/client-adapter/pom.xml +++ b/client-adapter/pom.xml @@ -4,12 +4,12 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter canal.client-adapter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT pom canal client adapter module for otter ${project.version} @@ -20,6 +20,7 @@ 1.8 1.8 UTF-8 + 2.17.0 @@ -30,6 +31,7 @@ rdb es6x es7x + es8x escore kudu phoenix @@ -52,7 +54,7 @@ central - http://repo1.maven.org/maven2 + https://repo1.maven.org/maven2 true @@ -62,7 +64,7 @@ java.net - http://download.java.net/maven/2/ + https://download.java.net/maven/2/ true @@ -72,7 +74,7 @@ aliyun - http://maven.aliyun.com/nexus/content/groups/public/ + https://maven.aliyun.com/nexus/content/groups/public/ true @@ -109,38 +111,12 @@ org.springframework.cloud spring-cloud-context - 2.0.0.RELEASE + 3.0.6 - org.springframework - spring-core - 5.0.5.RELEASE - - - org.springframework - spring-aop - 5.0.5.RELEASE - - - org.springframework - spring-context - 5.0.5.RELEASE - - - org.springframework - spring-jdbc - 5.0.5.RELEASE - - - org.springframework - spring-orm - 5.0.5.RELEASE - - - org.springframework - spring-test - 5.0.5.RELEASE - test + org.springframework.boot + spring-boot + 2.5.4 com.h2database @@ 
-151,47 +127,46 @@ org.apache.rocketmq rocketmq-client - 4.5.1 + ${rocketmq_version} org.apache.kafka kafka-clients - 1.1.1 + ${kafka_version} - + com.rabbitmq amqp-client - 5.5.0 + ${rabbitmq_version} com.alibaba.mq-amqp mq-amqp-client - 1.0.3 + ${mq_amqp_client} mysql mysql-connector-java - 5.1.48 + ${mysql_driver_version} org.postgresql postgresql - 42.1.4 + 42.5.1 - com.oracle + com.oracle.database.jdbc ojdbc6 - 11.2.0.3 + 11.2.0.4 com.microsoft.sqlserver mssql-jdbc 7.0.0.jre8 - org.apache.hbase hbase-shaded-client @@ -207,6 +182,37 @@ + + org.apache.phoenix + phoenix-core + 4.14.1-HBase-1.4 + + + org.apache.hbase + hbase-client + 1.4.8 + + + com.aliyun.openservices + tablestore + 5.10.3 + jar-with-dependencies + + + com.google.protobuf + protobuf-java + + + org.apache.httpcomponents + httpasyncclient + + + + + org.apache.kudu + kudu-client + 1.6.0 + org.apache.curator curator-recipes @@ -226,8 +232,9 @@ org.yaml snakeyaml - 1.19 + 1.29 + org.powermock powermock-api-mockito @@ -258,25 +265,6 @@ 1.9.0 test - - - com.aliyun.openservices - tablestore - 5.10.3 - jar-with-dependencies - - - com.google.protobuf - protobuf-java - - - org.apache.httpcomponents - httpasyncclient - - - - - diff --git a/client-adapter/rdb/pom.xml b/client-adapter/rdb/pom.xml index 534532d00e..ad27a942f5 100644 --- a/client-adapter/rdb/pom.xml +++ b/client-adapter/rdb/pom.xml @@ -3,7 +3,7 @@ canal.client-adapter com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml 4.0.0 @@ -30,7 +30,7 @@ test - com.oracle + com.oracle.database.jdbc ojdbc6 test @@ -42,7 +42,6 @@ junit junit - 4.12 test diff --git a/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/RdbAdapter.java b/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/RdbAdapter.java index be77ab49a9..f428740515 100644 --- a/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/RdbAdapter.java +++ 
b/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/RdbAdapter.java @@ -26,11 +26,7 @@ import com.alibaba.otter.canal.client.adapter.rdb.service.RdbMirrorDbSyncService; import com.alibaba.otter.canal.client.adapter.rdb.service.RdbSyncService; import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil; -import com.alibaba.otter.canal.client.adapter.support.Dml; -import com.alibaba.otter.canal.client.adapter.support.EtlResult; -import com.alibaba.otter.canal.client.adapter.support.OuterAdapterConfig; -import com.alibaba.otter.canal.client.adapter.support.SPI; -import com.alibaba.otter.canal.client.adapter.support.Util; +import com.alibaba.otter.canal.client.adapter.support.*; /** * RDB适配器实现类 @@ -56,6 +52,8 @@ public class RdbAdapter implements OuterAdapter { private Properties envProperties; + private OuterAdapterConfig configuration; + public Map getRdbMapping() { return rdbMapping; } @@ -76,49 +74,22 @@ public Map getMirrorDbConfigCache() { @Override public void init(OuterAdapterConfig configuration, Properties envProperties) { this.envProperties = envProperties; - + this.configuration = configuration; + // 从jdbc url获取db类型 Map properties = configuration.getProperties(); String dbType = JdbcUtils.getDbType(properties.get("jdbc.url"), null); - + // 当.yml文件编码格式存在问题,此处rdb yml文件构建 可能会抛出异常 Map rdbMappingTmp = ConfigLoader.load(envProperties); // 过滤不匹配的key的配置 - rdbMappingTmp.forEach((key, mappingConfig) -> { - if ((mappingConfig.getOuterAdapterKey() == null && configuration.getKey() == null) - || (mappingConfig.getOuterAdapterKey() != null && mappingConfig.getOuterAdapterKey() - .equalsIgnoreCase(configuration.getKey()))) { - rdbMapping.put(key, mappingConfig); - } + rdbMappingTmp.forEach((key, config) -> { + addConfig(key, config); }); if (rdbMapping.isEmpty()) { throw new RuntimeException("No rdb adapter found for config key: " + configuration.getKey()); } - for (Map.Entry entry : rdbMapping.entrySet()) { - String configName = 
entry.getKey(); - MappingConfig mappingConfig = entry.getValue(); - if (!mappingConfig.getDbMapping().getMirrorDb()) { - String key; - if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { - key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-" - + StringUtils.trimToEmpty(mappingConfig.getGroupId()) + "_" - + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable(); - } else { - key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_" - + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable(); - } - Map configMap = mappingConfigCache.computeIfAbsent(key, - k1 -> new ConcurrentHashMap<>()); - configMap.put(configName, mappingConfig); - } else { - // mirrorDB - String key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "." - + mappingConfig.getDbMapping().getDatabase(); - mirrorDbConfigCache.put(key, MirrorDbConfig.create(configName, mappingConfig)); - } - } - // 初始化连接池 dataSource = new DruidDataSource(); dataSource.setDriverClassName(properties.get("jdbc.driverClassName")); @@ -182,7 +153,9 @@ public void sync(List dmls) { return; } try { - rdbSyncService.sync(mappingConfigCache, dmls, envProperties); + if (!mappingConfigCache.isEmpty()) { + rdbSyncService.sync(mappingConfigCache, dmls, envProperties); + } rdbMirrorDbSyncService.sync(dmls); } catch (Exception e) { throw new RuntimeException(e); @@ -306,4 +279,67 @@ public void destroy() { dataSource.close(); } } + + private void addSyncConfigToCache(String configName, MappingConfig mappingConfig) { + if (!mappingConfig.getDbMapping().getMirrorDb()) { + String key; + if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { + key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-" + + StringUtils.trimToEmpty(mappingConfig.getGroupId()) + "_" + + mappingConfig.getDbMapping().getDatabase() + "-" + 
mappingConfig.getDbMapping().getTable(); + } else { + key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_" + + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable(); + } + Map configMap = mappingConfigCache.computeIfAbsent(key, + k1 -> new ConcurrentHashMap<>()); + configMap.put(configName, mappingConfig); + } else { + // mirrorDB + String key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "." + + mappingConfig.getDbMapping().getDatabase(); + mirrorDbConfigCache.put(key, MirrorDbConfig.create(configName, mappingConfig)); + } + } + + public boolean addConfig(String fileName, MappingConfig config) { + if (match(config)) { + rdbMapping.put(fileName, config); + addSyncConfigToCache(fileName, config); + FileName2KeyMapping.register(getClass().getAnnotation(SPI.class).value(), fileName, + configuration.getKey()); + return true; + } + return false; + } + + public void updateConfig(String fileName, MappingConfig config) { + if (config.getOuterAdapterKey() != null && !config.getOuterAdapterKey() + .equals(configuration.getKey())) { + // 理论上不允许改这个 因为本身就是通过这个关联起Adapter和Config的 + throw new RuntimeException("not allow to change outAdapterKey"); + } + rdbMapping.put(fileName, config); + addSyncConfigToCache(fileName, config); + } + + public void deleteConfig(String fileName) { + rdbMapping.remove(fileName); + for (Map configMap : mappingConfigCache.values()) { + if (configMap != null) { + configMap.remove(fileName); + } + } + FileName2KeyMapping.unregister(getClass().getAnnotation(SPI.class).value(), fileName); + } + + private boolean match(MappingConfig config) { + boolean sameMatch = config.getOuterAdapterKey() != null && config.getOuterAdapterKey() + .equalsIgnoreCase(configuration.getKey()); + boolean prefixMatch = config.getOuterAdapterKey() == null && configuration.getKey() + .startsWith(StringUtils + .join(new String[]{Util.AUTO_GENERATED_PREFIX, config.getDestination(), + config.getGroupId()}, '-')); + 
return sameMatch || prefixMatch; + } } diff --git a/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/ConfigLoader.java b/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/ConfigLoader.java index d65e0ec53e..cf57c75d9a 100644 --- a/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/ConfigLoader.java +++ b/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/config/ConfigLoader.java @@ -1,5 +1,6 @@ package com.alibaba.otter.canal.client.adapter.rdb.config; +import com.alibaba.otter.canal.client.adapter.support.YamlUtils; import java.util.LinkedHashMap; import java.util.Map; import java.util.Properties; @@ -7,7 +8,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder; import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; /** @@ -32,8 +32,7 @@ public static Map load(Properties envProperties) { Map configContentMap = MappingConfigsLoader.loadConfigs("rdb"); configContentMap.forEach((fileName, content) -> { - MappingConfig config = YmlConfigBinder - .bindYmlToObj(null, content, MappingConfig.class, null, envProperties); + MappingConfig config = YamlUtils.ymlToObj(null, content, MappingConfig.class, null, envProperties); if (config == null) { return; } diff --git a/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/monitor/RdbConfigMonitor.java b/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/monitor/RdbConfigMonitor.java index 159748ba92..794b021a73 100644 --- a/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/monitor/RdbConfigMonitor.java +++ b/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/monitor/RdbConfigMonitor.java @@ -1,25 +1,19 @@ package com.alibaba.otter.canal.client.adapter.rdb.monitor; +import 
com.alibaba.otter.canal.client.adapter.rdb.RdbAdapter; +import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig; +import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; +import com.alibaba.otter.canal.client.adapter.support.Util; +import com.alibaba.otter.canal.client.adapter.support.YamlUtils; import java.io.File; -import java.util.HashMap; -import java.util.Map; import java.util.Properties; - import org.apache.commons.io.filefilter.FileFilterUtils; import org.apache.commons.io.monitor.FileAlterationListenerAdaptor; import org.apache.commons.io.monitor.FileAlterationMonitor; import org.apache.commons.io.monitor.FileAlterationObserver; -import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder; -import com.alibaba.otter.canal.client.adapter.rdb.RdbAdapter; -import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig; -import com.alibaba.otter.canal.client.adapter.rdb.config.MirrorDbConfig; -import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; -import com.alibaba.otter.canal.client.adapter.support.Util; - public class RdbConfigMonitor { private static final Logger logger = LoggerFactory.getLogger(RdbConfigMonitor.class); @@ -68,16 +62,14 @@ public void onFileCreate(File file) { try { // 加载新增的配置文件 String configContent = MappingConfigsLoader.loadConfig(adapterName + File.separator + file.getName()); - MappingConfig config = YmlConfigBinder - .bindYmlToObj(null, configContent, MappingConfig.class, null, envProperties); + MappingConfig config = YamlUtils + .ymlToObj(null, configContent, MappingConfig.class, null, envProperties); if (config == null) { return; } config.validate(); - if ((key == null && config.getOuterAdapterKey() == null) - || (key != null && key.equals(config.getOuterAdapterKey()))) { - addConfigToCache(file, config); - + boolean result = rdbAdapter.addConfig(file.getName(), 
config); + if (result) { logger.info("Add a new rdb mapping config: {} to canal adapter", file.getName()); } } catch (Exception e) { @@ -98,22 +90,13 @@ public void onFileChange(File file) { onFileDelete(file); return; } - MappingConfig config = YmlConfigBinder - .bindYmlToObj(null, configContent, MappingConfig.class, null, envProperties); + MappingConfig config = YamlUtils + .ymlToObj(null, configContent, MappingConfig.class, null, envProperties); if (config == null) { return; } config.validate(); - if ((key == null && config.getOuterAdapterKey() == null) - || (key != null && key.equals(config.getOuterAdapterKey()))) { - if (rdbAdapter.getRdbMapping().containsKey(file.getName())) { - deleteConfigFromCache(file); - } - addConfigToCache(file, config); - } else { - // 不能修改outerAdapterKey - throw new RuntimeException("Outer adapter key not allowed modify"); - } + rdbAdapter.updateConfig(file.getName(), config); logger.info("Change a rdb mapping config: {} of canal adapter", file.getName()); } } catch (Exception e) { @@ -127,7 +110,7 @@ public void onFileDelete(File file) { try { if (rdbAdapter.getRdbMapping().containsKey(file.getName())) { - deleteConfigFromCache(file); + rdbAdapter.deleteConfig(file.getName()); logger.info("Delete a rdb mapping config: {} of canal adapter", file.getName()); } @@ -135,47 +118,5 @@ public void onFileDelete(File file) { logger.error(e.getMessage(), e); } } - - private void addConfigToCache(File file, MappingConfig mappingConfig) { - if (mappingConfig == null || mappingConfig.getDbMapping() == null) { - return; - } - rdbAdapter.getRdbMapping().put(file.getName(), mappingConfig); - if (!mappingConfig.getDbMapping().getMirrorDb()) { - Map configMap = rdbAdapter.getMappingConfigCache() - .computeIfAbsent(StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_" - + mappingConfig.getDbMapping().getDatabase() + "-" - + mappingConfig.getDbMapping().getTable(), - k1 -> new HashMap<>()); - configMap.put(file.getName(), mappingConfig); - } 
else { - Map mirrorDbConfigCache = rdbAdapter.getMirrorDbConfigCache(); - mirrorDbConfigCache.put(StringUtils.trimToEmpty(mappingConfig.getDestination()) + "." - + mappingConfig.getDbMapping().getDatabase(), - MirrorDbConfig.create(file.getName(), mappingConfig)); - } - } - - private void deleteConfigFromCache(File file) { - MappingConfig mappingConfig = rdbAdapter.getRdbMapping().remove(file.getName()); - - if (mappingConfig == null || mappingConfig.getDbMapping() == null) { - return; - } - if (!mappingConfig.getDbMapping().getMirrorDb()) { - for (Map configMap : rdbAdapter.getMappingConfigCache().values()) { - if (configMap != null) { - configMap.remove(file.getName()); - } - } - } else { - rdbAdapter.getMirrorDbConfigCache().forEach((key, mirrorDbConfig) -> { - if (mirrorDbConfig.getFileName().equals(file.getName())) { - rdbAdapter.getMirrorDbConfigCache().remove(key); - } - }); - } - - } } } diff --git a/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbEtlService.java b/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbEtlService.java index d1a511bd9b..18a73404ec 100644 --- a/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbEtlService.java +++ b/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbEtlService.java @@ -58,6 +58,7 @@ protected boolean executeSqlImport(DataSource srcDS, String sql, List va Map columnsMap = new LinkedHashMap<>(); Map columnType = new LinkedHashMap<>(); DruidDataSource dataSource = (DruidDataSource) srcDS; + String backtick = SyncUtil.getBacktickByDbType(dataSource.getDbType()); Util.sqlRS(targetDS, "SELECT * FROM " + SyncUtil.getDbTableName(dbMapping, dataSource.getDbType()) + " LIMIT 1 ", @@ -91,7 +92,7 @@ protected boolean executeSqlImport(DataSource srcDS, String sql, List va .append(SyncUtil.getDbTableName(dbMapping, dataSource.getDbType())) .append(" ("); columnsMap - 
.forEach((targetColumnName, srcColumnName) -> insertSql.append(targetColumnName).append(",")); + .forEach((targetColumnName, srcColumnName) -> insertSql.append(backtick).append(targetColumnName).append(backtick).append(",")); int len = insertSql.length(); insertSql.delete(len - 1, len).append(") VALUES ("); @@ -101,6 +102,7 @@ protected boolean executeSqlImport(DataSource srcDS, String sql, List va } len = insertSql.length(); insertSql.delete(len - 1, len).append(")"); + logger.info("executeSqlImport sql:{}",insertSql.toString()); try (Connection connTarget = targetDS.getConnection(); PreparedStatement pstmt = connTarget.prepareStatement(insertSql.toString())) { connTarget.setAutoCommit(false); @@ -114,7 +116,7 @@ protected boolean executeSqlImport(DataSource srcDS, String sql, List va Map pkVal = new LinkedHashMap<>(); StringBuilder deleteSql = new StringBuilder( "DELETE FROM " + SyncUtil.getDbTableName(dbMapping, dataSource.getDbType()) + " WHERE "); - appendCondition(dbMapping, deleteSql, pkVal, rs); + appendCondition(dbMapping, deleteSql, pkVal, rs, backtick); try (PreparedStatement pstmt2 = connTarget.prepareStatement(deleteSql.toString())) { int k = 1; for (Object val : pkVal.values()) { @@ -132,7 +134,6 @@ protected boolean executeSqlImport(DataSource srcDS, String sql, List va } Integer type = columnType.get(targetClolumnName.toLowerCase()); - Object value = rs.getObject(srcColumnName); if (value != null) { SyncUtil.setPStmt(type, pstmt, value, i); @@ -180,7 +181,7 @@ protected boolean executeSqlImport(DataSource srcDS, String sql, List va * 拼接目标表主键where条件 */ private static void appendCondition(DbMapping dbMapping, StringBuilder sql, Map values, - ResultSet rs) throws SQLException { + ResultSet rs, String backtick) throws SQLException { // 拼接主键 for (Map.Entry entry : dbMapping.getTargetPk().entrySet()) { String targetColumnName = entry.getKey(); @@ -188,7 +189,7 @@ private static void appendCondition(DbMapping dbMapping, StringBuilder sql, Map< if 
(srcColumnName == null) { srcColumnName = targetColumnName; } - sql.append(targetColumnName).append("=? AND "); + sql.append(backtick).append(targetColumnName).append(backtick).append("=? AND "); values.put(targetColumnName, rs.getObject(srcColumnName)); } int len = sql.length(); diff --git a/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbMirrorDbSyncService.java b/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbMirrorDbSyncService.java index 795eb2e782..ac1fe59a8d 100644 --- a/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbMirrorDbSyncService.java +++ b/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbMirrorDbSyncService.java @@ -12,8 +12,8 @@ import org.slf4j.LoggerFactory; import com.alibaba.druid.pool.DruidDataSource; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter.Feature; import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig; import com.alibaba.otter.canal.client.adapter.rdb.config.MirrorDbConfig; import com.alibaba.otter.canal.client.adapter.rdb.support.SyncUtil; @@ -71,7 +71,7 @@ public void sync(List dmls) { // DDL if (logger.isDebugEnabled()) { - logger.debug("DDL: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + logger.debug("DDL: {}", JSON.toJSONString(dml, Feature.WriteNulls)); } executeDdl(mirrorDbConfig, dml); rdbSyncService.getColumnsTypeCache().remove(destination + "." + database + "." 
+ dml.getTable()); diff --git a/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java b/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java index 3b5abaeab5..ac9db959b8 100644 --- a/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java +++ b/client-adapter/rdb/src/main/java/com/alibaba/otter/canal/client/adapter/rdb/service/RdbSyncService.java @@ -21,8 +21,8 @@ import org.slf4j.LoggerFactory; import com.alibaba.druid.pool.DruidDataSource; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter.Feature; import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig; import com.alibaba.otter.canal.client.adapter.rdb.config.MappingConfig.DbMapping; import com.alibaba.otter.canal.client.adapter.rdb.support.BatchExecutor; @@ -232,10 +232,10 @@ public void sync(BatchExecutor batchExecutor, MappingConfig config, SingleDml dm truncate(batchExecutor, config); } if (logger.isDebugEnabled()) { - logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + logger.debug("DML: {}", JSON.toJSONString(dml, Feature.WriteNulls)); } } catch (SQLException e) { - logger.error("Failed to sync single DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + logger.error("Failed to sync single DML: {}", JSON.toJSONString(dml, Feature.WriteNulls)); throw new RuntimeException(e); } } diff --git a/client-adapter/tablestore/pom.xml b/client-adapter/tablestore/pom.xml index 0a76468de9..a6187e05ae 100644 --- a/client-adapter/tablestore/pom.xml +++ b/client-adapter/tablestore/pom.xml @@ -3,7 +3,7 @@ canal.client-adapter com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml 4.0.0 @@ -30,7 +30,6 @@ com.aliyun.openservices tablestore - 5.10.3 jar-with-dependencies @@
-43,8 +42,6 @@ - - diff --git a/client-adapter/tablestore/src/main/java/com/alibaba/otter/canal/client/adapter/tablestore/TablestoreAdapter.java b/client-adapter/tablestore/src/main/java/com/alibaba/otter/canal/client/adapter/tablestore/TablestoreAdapter.java index b39b729476..3e6b877243 100644 --- a/client-adapter/tablestore/src/main/java/com/alibaba/otter/canal/client/adapter/tablestore/TablestoreAdapter.java +++ b/client-adapter/tablestore/src/main/java/com/alibaba/otter/canal/client/adapter/tablestore/TablestoreAdapter.java @@ -1,6 +1,8 @@ package com.alibaba.otter.canal.client.adapter.tablestore; +import com.alibaba.otter.canal.client.adapter.support.FileName2KeyMapping; +import com.alibaba.otter.canal.client.adapter.support.Util; import java.util.*; import java.util.concurrent.*; import java.util.stream.Collectors; @@ -55,62 +57,14 @@ public void init(OuterAdapterConfig configuration, Properties envProperties) { this.configuration = configuration; Map tablestoreMappingTmp = ConfigLoader.load(envProperties); // 过滤不匹配的key的配置 - tablestoreMappingTmp.forEach((key, mappingConfig) -> { - if ((mappingConfig.getOuterAdapterKey() == null && configuration.getKey() == null) - || (mappingConfig.getOuterAdapterKey() != null && mappingConfig.getOuterAdapterKey() - .equalsIgnoreCase(configuration.getKey()))) { - tablestoreMapping.put(key, mappingConfig); - mappingConfig.getDbMapping().init(mappingConfig); - } + tablestoreMappingTmp.forEach((key, config) -> { + addConfig(key, config); }); if (tablestoreMapping.isEmpty()) { throw new RuntimeException("No tablestore adapter found for config key: " + configuration.getKey()); } - Map properties = configuration.getProperties(); - - for (Map.Entry entry : tablestoreMapping.entrySet()) { - String configName = entry.getKey(); - MappingConfig mappingConfig = entry.getValue(); - String key; - if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { - key = 
StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-" - + StringUtils.trimToEmpty(mappingConfig.getGroupId()) + "_" - + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable(); - } else { - key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_" - + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable(); - } - Map configMap = mappingConfigCache.computeIfAbsent(key, - k1 -> new ConcurrentHashMap<>()); - configMap.put(configName, mappingConfig); - - - // 构建对应的 TableStoreWriter - ServiceCredentials credentials = new DefaultCredentials( - properties.get(PropertyConstants.TABLESTORE_ACCESSSECRETID), - properties.get(PropertyConstants.TABLESTORE_ACCESSSECRETKEY) - ); - - - WriterConfig config = getWriterConfig(mappingConfig); - - TableStoreWriter writer = new DefaultTableStoreWriter( - properties.get(PropertyConstants.TABLESTORE_ENDPOINT), - credentials, - properties.get(PropertyConstants.TABLESTORE_INSTANCENAME), - mappingConfig.getDbMapping().getTargetTable(), - config, - null - ); - - Map config2writerMap = writerCache.computeIfAbsent(key, - k1 -> new ConcurrentHashMap<>()); - config2writerMap.put(configName, writer); - - } - tablestoreSyncService = new TablestoreSyncService(); } @@ -315,4 +269,84 @@ public void destroy() { } } } + + private void addSyncConfigToCache(String configName, MappingConfig mappingConfig) { + Map properties = configuration.getProperties(); + String key; + if (envProperties != null && !"tcp".equalsIgnoreCase(envProperties.getProperty("canal.conf.mode"))) { + key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "-" + + StringUtils.trimToEmpty(mappingConfig.getGroupId()) + "_" + + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable(); + } else { + key = StringUtils.trimToEmpty(mappingConfig.getDestination()) + "_" + + mappingConfig.getDbMapping().getDatabase() + "-" + mappingConfig.getDbMapping().getTable(); 
+ } + Map configMap = mappingConfigCache.computeIfAbsent(key, + k1 -> new ConcurrentHashMap<>()); + configMap.put(configName, mappingConfig); + + + // 构建对应的 TableStoreWriter + ServiceCredentials credentials = new DefaultCredentials( + properties.get(PropertyConstants.TABLESTORE_ACCESSSECRETID), + properties.get(PropertyConstants.TABLESTORE_ACCESSSECRETKEY) + ); + + + WriterConfig config = getWriterConfig(mappingConfig); + + TableStoreWriter writer = new DefaultTableStoreWriter( + properties.get(PropertyConstants.TABLESTORE_ENDPOINT), + credentials, + properties.get(PropertyConstants.TABLESTORE_INSTANCENAME), + mappingConfig.getDbMapping().getTargetTable(), + config, + null + ); + + Map config2writerMap = writerCache.computeIfAbsent(key, + k1 -> new ConcurrentHashMap<>()); + config2writerMap.put(configName, writer); + } + + public boolean addConfig(String fileName, MappingConfig config) { + if (match(config)) { + tablestoreMapping.put(fileName, config); + addSyncConfigToCache(fileName, config); + FileName2KeyMapping.register(getClass().getAnnotation(SPI.class).value(), fileName, + configuration.getKey()); + return true; + } + return false; + } + + public void updateConfig(String fileName, MappingConfig config) { + if (config.getOuterAdapterKey() != null && !config.getOuterAdapterKey() + .equals(configuration.getKey())) { + // 理论上不允许改这个 因为本身就是通过这个关联起Adapter和Config的 + throw new RuntimeException("not allow to change outAdapterKey"); + } + tablestoreMapping.put(fileName, config); + addSyncConfigToCache(fileName, config); + } + + public void deleteConfig(String fileName) { + tablestoreMapping.remove(fileName); + for (Map configMap : mappingConfigCache.values()) { + if (configMap != null) { + configMap.remove(fileName); + } + } + FileName2KeyMapping.unregister(getClass().getAnnotation(SPI.class).value(), fileName); + } + + private boolean match(MappingConfig config) { + boolean sameMatch = config.getOuterAdapterKey() != null && config.getOuterAdapterKey() + 
.equalsIgnoreCase(configuration.getKey()); + boolean prefixMatch = config.getOuterAdapterKey() == null && configuration.getKey() + .startsWith(StringUtils + .join(new String[]{Util.AUTO_GENERATED_PREFIX, config.getDestination(), + config.getGroupId()}, '-')); + return sameMatch || prefixMatch; + } } diff --git a/client-adapter/tablestore/src/main/java/com/alibaba/otter/canal/client/adapter/tablestore/config/ConfigLoader.java b/client-adapter/tablestore/src/main/java/com/alibaba/otter/canal/client/adapter/tablestore/config/ConfigLoader.java index f064fdfa59..19b4a8f073 100644 --- a/client-adapter/tablestore/src/main/java/com/alibaba/otter/canal/client/adapter/tablestore/config/ConfigLoader.java +++ b/client-adapter/tablestore/src/main/java/com/alibaba/otter/canal/client/adapter/tablestore/config/ConfigLoader.java @@ -1,7 +1,7 @@ package com.alibaba.otter.canal.client.adapter.tablestore.config; -import com.alibaba.otter.canal.client.adapter.config.YmlConfigBinder; import com.alibaba.otter.canal.client.adapter.support.MappingConfigsLoader; +import com.alibaba.otter.canal.client.adapter.support.YamlUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.util.StringUtils; @@ -32,8 +32,7 @@ public static Map load(Properties envProperties) { Map configContentMap = MappingConfigsLoader.loadConfigs("tablestore"); configContentMap.forEach((fileName, content) -> { - MappingConfig config = YmlConfigBinder - .bindYmlToObj(null, content, MappingConfig.class, null, envProperties); + MappingConfig config = YamlUtils.ymlToObj(null, content, MappingConfig.class, null, envProperties); if (config == null) { return; } @@ -45,7 +44,8 @@ public static Map load(Properties envProperties) { result.put(fileName, config); }); - logger.info("## Tablestore mapping config loaded:" + StringUtils.collectionToCommaDelimitedString(result.keySet())); + logger.info( + "## Tablestore mapping config loaded:" + 
StringUtils.collectionToCommaDelimitedString(result.keySet())); return result; } } diff --git a/client-adapter/tablestore/src/main/java/com/alibaba/otter/canal/client/adapter/tablestore/service/TablestoreSyncService.java b/client-adapter/tablestore/src/main/java/com/alibaba/otter/canal/client/adapter/tablestore/service/TablestoreSyncService.java index 45b3bbba4d..b62b7f911b 100644 --- a/client-adapter/tablestore/src/main/java/com/alibaba/otter/canal/client/adapter/tablestore/service/TablestoreSyncService.java +++ b/client-adapter/tablestore/src/main/java/com/alibaba/otter/canal/client/adapter/tablestore/service/TablestoreSyncService.java @@ -6,8 +6,8 @@ import java.util.concurrent.Future; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter.Feature; import com.alibaba.otter.canal.client.adapter.tablestore.enums.TablestoreFieldType; import com.alibaba.otter.canal.client.adapter.tablestore.support.SyncUtil; import com.alicloud.openservices.tablestore.TableStoreWriter; @@ -59,7 +59,7 @@ public Future sync(MappingConfig mappingConfig, Future future = writer.addRowChangeWithFuture(rowChanges); if (logger.isDebugEnabled()) { - logger.debug("DML: {}", JSON.toJSONString(dml, SerializerFeature.WriteMapNullValue)); + logger.debug("DML: {}", JSON.toJSONString(dml, Feature.WriteNulls)); } return future; } diff --git a/client/pom.xml b/client/pom.xml index 70a6107613..1b00a8c413 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter @@ -50,10 +50,6 @@ commons-codec commons-codec - - com.alibaba - fastjson - com.google.guava guava @@ -105,37 +101,44 @@ org.apache.rocketmq rocketmq-client - 4.5.2 + ${rocketmq_version} provided org.apache.rocketmq rocketmq-acl - 4.5.2 + ${rocketmq_version} provided com.rabbitmq amqp-client - 5.5.0 + ${rabbitmq_version} provided 
com.alibaba.mq-amqp mq-amqp-client - 1.0.3 + ${mq_amqp_client} provided org.apache.kafka kafka-clients - 1.1.1 + ${kafka_version} provided org.apache.pulsar pulsar-client + ${pulsar_version} + provided + + + org.apache.pulsar + pulsar-client-admin + ${pulsar_version} provided diff --git a/client/src/main/java/com/alibaba/otter/canal/client/kafka/KafkaCanalConnector.java b/client/src/main/java/com/alibaba/otter/canal/client/kafka/KafkaCanalConnector.java index aa8c41f849..f0c1df2417 100644 --- a/client/src/main/java/com/alibaba/otter/canal/client/kafka/KafkaCanalConnector.java +++ b/client/src/main/java/com/alibaba/otter/canal/client/kafka/KafkaCanalConnector.java @@ -1,5 +1,6 @@ package com.alibaba.otter.canal.client.kafka; +import com.alibaba.fastjson.JSON; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; @@ -10,7 +11,6 @@ import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.serialization.StringDeserializer; -import com.alibaba.fastjson.JSON; import com.alibaba.otter.canal.client.CanalMQConnector; import com.alibaba.otter.canal.client.impl.SimpleCanalConnector; import com.alibaba.otter.canal.protocol.FlatMessage; @@ -43,7 +43,7 @@ public class KafkaCanalConnector implements CanalMQConnector { private Map currentOffsets = new ConcurrentHashMap<>(); public KafkaCanalConnector(String servers, String topic, Integer partition, String groupId, Integer batchSize, - boolean flatMessage){ + boolean flatMessage){ this.topic = topic; this.partition = partition; this.flatMessage = flatMessage; @@ -61,11 +61,11 @@ public KafkaCanalConnector(String servers, String topic, Integer partition, Stri batchSize = 100; } properties.put("max.poll.records", batchSize.toString()); - properties.put("key.deserializer", StringDeserializer.class.getName()); + properties.put("key.deserializer", StringDeserializer.class); if (!flatMessage) { - properties.put("value.deserializer", 
MessageDeserializer.class.getName()); + properties.put("value.deserializer", MessageDeserializer.class); } else { - properties.put("value.deserializer", StringDeserializer.class.getName()); + properties.put("value.deserializer", StringDeserializer.class); } } @@ -183,14 +183,13 @@ public List getListWithoutAck(Long timeout, TimeUnit unit) throws Canal ConsumerRecords records = kafkaConsumer.poll(unit.toMillis(timeout)); - currentOffsets.clear(); - for (TopicPartition topicPartition : records.partitions()) { - currentOffsets.put(topicPartition.partition(), kafkaConsumer.position(topicPartition)); - } - if (!records.isEmpty()) { + currentOffsets.clear(); List messages = new ArrayList<>(); for (ConsumerRecord record : records) { + if (currentOffsets.get(record.partition()) == null) { + currentOffsets.put(record.partition(), record.offset()); + } messages.add(record.value()); } return messages; @@ -221,14 +220,13 @@ public List getFlatListWithoutAck(Long timeout, TimeUnit unit) thro ConsumerRecords records = kafkaConsumer2.poll(unit.toMillis(timeout)); - currentOffsets.clear(); - for (TopicPartition topicPartition : records.partitions()) { - currentOffsets.put(topicPartition.partition(), kafkaConsumer2.position(topicPartition)); - } - if (!records.isEmpty()) { + currentOffsets.clear(); List flatMessages = new ArrayList<>(); for (ConsumerRecord record : records) { + if (currentOffsets.get(record.partition()) == null) { + currentOffsets.put(record.partition(), record.offset()); + } String flatMessageJson = record.value(); FlatMessage flatMessage = JSON.parseObject(flatMessageJson, FlatMessage.class); flatMessages.add(flatMessage); @@ -248,12 +246,14 @@ public void rollback() { // 回滚所有分区 if (kafkaConsumer != null) { for (Map.Entry entry : currentOffsets.entrySet()) { - kafkaConsumer.seek(new TopicPartition(topic, entry.getKey()), entry.getValue() - 1); + kafkaConsumer.seek(new TopicPartition(topic, entry.getKey()), currentOffsets.get(entry.getKey())); + 
kafkaConsumer.commitSync(); } } if (kafkaConsumer2 != null) { for (Map.Entry entry : currentOffsets.entrySet()) { - kafkaConsumer2.seek(new TopicPartition(topic, entry.getKey()), entry.getValue() - 1); + kafkaConsumer2.seek(new TopicPartition(topic, entry.getKey()), currentOffsets.get(entry.getKey())); + kafkaConsumer2.commitSync(); } } } @@ -324,3 +324,4 @@ public void setSessionTimeout(Long timeout, TimeUnit unit) { } } + diff --git a/client/src/main/java/com/alibaba/otter/canal/client/kafka/KafkaOffsetCanalConnector.java b/client/src/main/java/com/alibaba/otter/canal/client/kafka/KafkaOffsetCanalConnector.java index bd2f775dce..46841ae4e1 100644 --- a/client/src/main/java/com/alibaba/otter/canal/client/kafka/KafkaOffsetCanalConnector.java +++ b/client/src/main/java/com/alibaba/otter/canal/client/kafka/KafkaOffsetCanalConnector.java @@ -1,6 +1,6 @@ package com.alibaba.otter.canal.client.kafka; -import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson2.JSON; import com.alibaba.otter.canal.client.kafka.protocol.KafkaFlatMessage; import com.alibaba.otter.canal.client.kafka.protocol.KafkaMessage; import com.alibaba.otter.canal.protocol.FlatMessage; diff --git a/client/src/main/java/com/alibaba/otter/canal/client/pulsarmq/PulsarMQCanalConnector.java b/client/src/main/java/com/alibaba/otter/canal/client/pulsarmq/PulsarMQCanalConnector.java index 31762417ab..9e3e2cb89c 100644 --- a/client/src/main/java/com/alibaba/otter/canal/client/pulsarmq/PulsarMQCanalConnector.java +++ b/client/src/main/java/com/alibaba/otter/canal/client/pulsarmq/PulsarMQCanalConnector.java @@ -1,6 +1,6 @@ package com.alibaba.otter.canal.client.pulsarmq; -import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson2.JSON; import com.alibaba.otter.canal.client.CanalMQConnector; import com.alibaba.otter.canal.client.CanalMessageDeserializer; import com.alibaba.otter.canal.client.impl.SimpleCanalConnector; diff --git
a/client/src/main/java/com/alibaba/otter/canal/client/rabbitmq/RabbitMQCanalConnector.java b/client/src/main/java/com/alibaba/otter/canal/client/rabbitmq/RabbitMQCanalConnector.java index b0bbc97cec..0f9565b61c 100644 --- a/client/src/main/java/com/alibaba/otter/canal/client/rabbitmq/RabbitMQCanalConnector.java +++ b/client/src/main/java/com/alibaba/otter/canal/client/rabbitmq/RabbitMQCanalConnector.java @@ -1,6 +1,6 @@ package com.alibaba.otter.canal.client.rabbitmq; -import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson2.JSON; import com.alibaba.otter.canal.client.CanalMQConnector; import com.alibaba.otter.canal.client.CanalMessageDeserializer; import com.alibaba.otter.canal.client.ConsumerBatchMessage; diff --git a/client/src/main/java/com/alibaba/otter/canal/client/rocketmq/RocketMQCanalConnector.java b/client/src/main/java/com/alibaba/otter/canal/client/rocketmq/RocketMQCanalConnector.java index ac0a61916f..ac17899335 100644 --- a/client/src/main/java/com/alibaba/otter/canal/client/rocketmq/RocketMQCanalConnector.java +++ b/client/src/main/java/com/alibaba/otter/canal/client/rocketmq/RocketMQCanalConnector.java @@ -22,7 +22,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson2.JSON; import com.alibaba.otter.canal.client.CanalMQConnector; import com.alibaba.otter.canal.client.CanalMessageDeserializer; import com.alibaba.otter.canal.client.impl.SimpleCanalConnector; diff --git a/common/pom.xml b/common/pom.xml index c8125c453e..e3f5f96424 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml canal.common @@ -39,8 +39,8 @@ commons-codec - com.alibaba - fastjson + com.alibaba.fastjson2 + fastjson2 com.google.guava diff --git a/common/src/main/java/com/alibaba/otter/canal/common/utils/JsonUtils.java b/common/src/main/java/com/alibaba/otter/canal/common/utils/JsonUtils.java index 456d96887e..f43a356f70 
100644 --- a/common/src/main/java/com/alibaba/otter/canal/common/utils/JsonUtils.java +++ b/common/src/main/java/com/alibaba/otter/canal/common/utils/JsonUtils.java @@ -1,21 +1,15 @@ package com.alibaba.otter.canal.common.utils; -import java.io.IOException; import java.lang.reflect.Type; -import java.net.Inet4Address; -import java.net.Inet6Address; import java.net.InetAddress; import java.util.Arrays; import java.util.List; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.TypeReference; -import com.alibaba.fastjson.parser.ParserConfig; -import com.alibaba.fastjson.serializer.JSONSerializer; -import com.alibaba.fastjson.serializer.ObjectSerializer; -import com.alibaba.fastjson.serializer.SerializeConfig; -import com.alibaba.fastjson.serializer.SerializeWriter; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson2.*; +import com.alibaba.fastjson2.filter.Filter; +import com.alibaba.fastjson2.filter.PropertyFilter; +import com.alibaba.fastjson2.writer.ObjectWriter; + /** * 字节处理相关工具类 @@ -23,46 +17,41 @@ * @author jianghang */ public class JsonUtils { - - static { - SerializeConfig.getGlobalInstance().put(InetAddress.class, InetAddressSerializer.instance); - SerializeConfig.getGlobalInstance().put(Inet4Address.class, InetAddressSerializer.instance); - SerializeConfig.getGlobalInstance().put(Inet6Address.class, InetAddressSerializer.instance); - // ParserConfig.getGlobalInstance().setAutoTypeSupport(true); - - ParserConfig.getGlobalInstance().addAccept("com.alibaba.otter."); - ParserConfig.getGlobalInstance().addAccept("com.taobao.tddl.dbsync."); - } + static final Filter AUTO_TYPE_FILTER = JSONReader.autoTypeFilter( + "com.alibaba.otter.", + "com.taobao.tddl.dbsync." 
+ ); public static T unmarshalFromByte(byte[] bytes, Class targetClass) { - return (T) JSON.parseObject(bytes, targetClass);// 默认为UTF-8 + return (T) JSON.parseObject(bytes, targetClass, AUTO_TYPE_FILTER);// 默认为UTF-8 } public static T unmarshalFromByte(byte[] bytes, TypeReference type) { - return (T) JSON.parseObject(bytes, type.getType()); + + return (T) JSON.parseObject(bytes, type.getType(), AUTO_TYPE_FILTER); } public static byte[] marshalToByte(Object obj) { return JSON.toJSONBytes(obj); // 默认为UTF-8 } - public static byte[] marshalToByte(Object obj, SerializerFeature... features) { + public static byte[] marshalToByte(Object obj, JSONWriter.Feature... features) { return JSON.toJSONBytes(obj, features); // 默认为UTF-8 } public static T unmarshalFromString(String json, Class targetClass) { - return (T) JSON.parseObject(json, targetClass);// 默认为UTF-8 + return (T) JSON.parseObject(json, targetClass, AUTO_TYPE_FILTER);// 默认为UTF-8 } public static T unmarshalFromString(String json, TypeReference type) { - return (T) JSON.parseObject(json, type);// 默认为UTF-8 + return (T) JSON.parseObject(json, type.getType(), AUTO_TYPE_FILTER);// 默认为UTF-8 } public static String marshalToString(Object obj) { return JSON.toJSONString(obj); // 默认为UTF-8 } - public static String marshalToString(Object obj, SerializerFeature... features) { + public static String marshalToString(Object obj, JSONWriter.Feature... features) { return JSON.toJSONString(obj, features); // 默认为UTF-8 } @@ -71,29 +60,29 @@ public static String marshalToString(Object obj, SerializerFeature... features) */ public static String marshalToString(Object obj, String... 
fliterFields) { final List propertyFliters = Arrays.asList(fliterFields); - try (SerializeWriter out = new SerializeWriter()) { - JSONSerializer serializer = new JSONSerializer(out); - serializer.getPropertyFilters().add((source, name, value) -> !propertyFliters.contains(name)); - serializer.write(obj); - return out.toString(); - } + + return JSON.toJSONString(obj, new PropertyFilter() { + @Override + public boolean apply(Object object, String name, Object value) { + return !propertyFliters.contains(name); + } + }); } - public static class InetAddressSerializer implements ObjectSerializer { + public static class InetAddressWriter implements ObjectWriter { - public static InetAddressSerializer instance = new InetAddressSerializer(); + public static InetAddressWriter instance = new InetAddressWriter(); @Override - public void write(JSONSerializer serializer, Object object, Object fieldName, Type fieldType, int features) - throws IOException { + public void write(JSONWriter jsonWriter, Object object, Object fieldName, Type fieldType, long features) { if (object == null) { - serializer.writeNull(); + jsonWriter.writeNull(); return; } InetAddress address = (InetAddress) object; // 优先使用name - serializer.write(address.getHostName()); + jsonWriter.writeString(address.getHostName()); } } } diff --git a/common/src/main/java/com/alibaba/otter/canal/common/zookeeper/running/ServerRunningMonitor.java b/common/src/main/java/com/alibaba/otter/canal/common/zookeeper/running/ServerRunningMonitor.java index fe983dc2b7..bc068abdc2 100644 --- a/common/src/main/java/com/alibaba/otter/canal/common/zookeeper/running/ServerRunningMonitor.java +++ b/common/src/main/java/com/alibaba/otter/canal/common/zookeeper/running/ServerRunningMonitor.java @@ -38,7 +38,7 @@ public class ServerRunningMonitor extends AbstractCanalLifeCycle { private ServerRunningData serverData; // 当前实际运行的节点状态信息 private volatile ServerRunningData activeData; - private ScheduledExecutorService delayExector = 
Executors.newScheduledThreadPool(1); + private ScheduledExecutorService delayExecutor; private int delayTime = 5; private ServerRunningListener listener; @@ -73,7 +73,7 @@ public void handleDataDeleted(String dataPath) throws Exception { initRunning(); } else { // 否则就是等待delayTime,避免因网络瞬端或者zk异常,导致出现频繁的切换操作 - delayExector.schedule(() -> initRunning(), delayTime, TimeUnit.SECONDS); + delayExecutor.schedule(() -> initRunning(), delayTime, TimeUnit.SECONDS); } } @@ -90,6 +90,7 @@ public synchronized void start() { try { processStart(); if (zkClient != null) { + delayExecutor = Executors.newScheduledThreadPool(1); // 如果需要尽可能释放instance资源,不需要监听running节点,不然即使stop了这台机器,另一台机器立马会start String path = ZookeeperPathUtils.getDestinationServerRunning(destination); zkClient.subscribeDataChanges(path, dataListener); @@ -122,6 +123,10 @@ public synchronized void stop() { if (zkClient != null) { String path = ZookeeperPathUtils.getDestinationServerRunning(destination); zkClient.unsubscribeDataChanges(path, dataListener); + if (delayExecutor != null) { + delayExecutor.shutdown(); + delayExecutor = null; + } releaseRunning(); // 尝试一下release } else { diff --git a/common/src/test/java/com/alibaba/otter/canal/common/JsonUtilsTest.java b/common/src/test/java/com/alibaba/otter/canal/common/JsonUtilsTest.java new file mode 100644 index 0000000000..b913062f87 --- /dev/null +++ b/common/src/test/java/com/alibaba/otter/canal/common/JsonUtilsTest.java @@ -0,0 +1,21 @@ +package com.alibaba.otter.canal.common; + +import com.alibaba.otter.canal.common.utils.JsonUtils; +import org.junit.Test; + +import java.net.InetAddress; + +import static org.junit.Assert.assertEquals; + +public class JsonUtilsTest { + @Test + public void marshalToString() throws Exception { + InetAddress address = InetAddress.getByName("localhost"); + + String json = JsonUtils.marshalToString(address); + assertEquals("\"localhost\"", json); + + InetAddress address1 = JsonUtils.unmarshalFromString(json, InetAddress.class); + 
assertEquals(address, address1); + } +} diff --git a/common/src/test/java/com/alibaba/otter/canal/common/ServerRunningTest.java b/common/src/test/java/com/alibaba/otter/canal/common/ServerRunningTest.java index 942ab81b5f..ae0af9cc4e 100644 --- a/common/src/test/java/com/alibaba/otter/canal/common/ServerRunningTest.java +++ b/common/src/test/java/com/alibaba/otter/canal/common/ServerRunningTest.java @@ -106,21 +106,21 @@ private ServerRunningMonitor buildServerRunning(final CountDownLatch countLatch, runningMonitor.setListener(new ServerRunningListener() { public void processActiveEnter() { - System.out.println(String.format("cid:%s ip:%s:%s has start", ip, port)); + System.out.println(String.format("ip:%s:%s has start", ip, port)); countLatch.countDown(); } public void processActiveExit() { - System.out.println(String.format("cid:%s ip:%s:%s has stop", ip, port)); + System.out.println(String.format("ip:%s:%s has stop", ip, port)); countLatch.countDown(); } public void processStart() { - System.out.println(String.format("cid:%s ip:%s:%s processStart", ip, port)); + System.out.println(String.format("ip:%s:%s processStart", ip, port)); } public void processStop() { - System.out.println(String.format("cid:%s ip:%s:%s processStop", ip, port)); + System.out.println(String.format("ip:%s:%s processStop", ip, port)); } }); diff --git a/connector/core/pom.xml b/connector/core/pom.xml index d981f058c0..f5af14844b 100644 --- a/connector/core/pom.xml +++ b/connector/core/pom.xml @@ -3,7 +3,7 @@ canal.connector com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT 4.0.0 com.alibaba.otter diff --git a/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/filter/AviaterRegexFilter.java b/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/filter/AviaterRegexFilter.java index d95087c6c1..6d7bb28390 100644 --- a/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/filter/AviaterRegexFilter.java +++ 
b/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/filter/AviaterRegexFilter.java @@ -107,11 +107,11 @@ public int compare(String str1, String str2) { private List completionPattern(List patterns) { List result = new ArrayList<>(); for (String pattern : patterns) { - StringBuffer stringBuffer = new StringBuffer(); - stringBuffer.append("^"); - stringBuffer.append(pattern); - stringBuffer.append("$"); - result.add(stringBuffer.toString()); + StringBuilder stringBuilder = new StringBuilder(); + stringBuilder.append("^"); + stringBuilder.append(pattern); + stringBuilder.append("$"); + result.add(stringBuilder.toString()); } return result; } diff --git a/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/producer/MQMessageUtils.java b/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/producer/MQMessageUtils.java index 2e60ee243f..fcbe69ad46 100644 --- a/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/producer/MQMessageUtils.java +++ b/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/producer/MQMessageUtils.java @@ -374,6 +374,7 @@ public static List messageConverter(EntryRowData[] datas, long id) flatMessage.setEs(entry.getHeader().getExecuteTime()); flatMessage.setTs(System.currentTimeMillis()); flatMessage.setSql(rowChange.getSql()); + flatMessage.setGtid(entry.getHeader().getGtid()); if (!rowChange.getIsDdl()) { Map sqlType = new LinkedHashMap<>(); @@ -432,9 +433,7 @@ public static List messageConverter(EntryRowData[] datas, long id) } } // update操作将记录修改前的值 - if (!rowOld.isEmpty()) { - old.add(rowOld); - } + old.add(rowOld); } } if (!sqlType.isEmpty()) { @@ -526,6 +525,7 @@ public static FlatMessage[] messagePartition(FlatMessage flatMessage, Integer pa flatMessageTmp.setEs(flatMessage.getEs()); flatMessageTmp.setTs(flatMessage.getTs()); flatMessageTmp.setPkNames(flatMessage.getPkNames()); + flatMessageTmp.setGtid(flatMessage.getGtid()); } List> data = 
flatMessageTmp.getData(); if (data == null) { diff --git a/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/spi/ProxyCanalMQProducer.java b/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/spi/ProxyCanalMQProducer.java new file mode 100644 index 0000000000..e745f5684c --- /dev/null +++ b/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/spi/ProxyCanalMQProducer.java @@ -0,0 +1,66 @@ +package com.alibaba.otter.canal.connector.core.spi; + +import com.alibaba.otter.canal.connector.core.config.MQProperties; +import com.alibaba.otter.canal.connector.core.producer.MQDestination; +import com.alibaba.otter.canal.connector.core.util.Callback; +import com.alibaba.otter.canal.protocol.Message; +import java.util.Properties; + +public class ProxyCanalMQProducer implements CanalMQProducer { + + private CanalMQProducer canalMQProducer; + + public ProxyCanalMQProducer(CanalMQProducer canalMQProducer) { + this.canalMQProducer = canalMQProducer; + } + + private ClassLoader changeCL() { + ClassLoader cl = Thread.currentThread().getContextClassLoader(); + Thread.currentThread().setContextClassLoader(canalMQProducer.getClass().getClassLoader()); + return cl; + } + + private void revertCL(ClassLoader cl) { + Thread.currentThread().setContextClassLoader(cl); + } + + @Override + public void init(Properties properties) { + ClassLoader cl = changeCL(); + try { + canalMQProducer.init(properties); + } finally { + revertCL(cl); + } + } + + @Override + public MQProperties getMqProperties() { + ClassLoader cl = changeCL(); + try { + return canalMQProducer.getMqProperties(); + } finally { + revertCL(cl); + } + } + + @Override + public void send(MQDestination canalDestination, Message message, Callback callback) { + ClassLoader cl = changeCL(); + try { + canalMQProducer.send(canalDestination, message, callback); + } finally { + revertCL(cl); + } + } + + @Override + public void stop() { + ClassLoader cl = changeCL(); + try { + 
canalMQProducer.stop(); + } finally { + revertCL(cl); + } + } +} diff --git a/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/spi/ProxyCanalMsgConsumer.java b/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/spi/ProxyCanalMsgConsumer.java new file mode 100644 index 0000000000..765add8043 --- /dev/null +++ b/connector/core/src/main/java/com/alibaba/otter/canal/connector/core/spi/ProxyCanalMsgConsumer.java @@ -0,0 +1,86 @@ +package com.alibaba.otter.canal.connector.core.spi; + +import com.alibaba.otter.canal.connector.core.consumer.CommonMessage; +import java.util.List; +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class ProxyCanalMsgConsumer implements CanalMsgConsumer { + + private CanalMsgConsumer canalMsgConsumer; + + public ProxyCanalMsgConsumer(CanalMsgConsumer canalMsgConsumer) { + this.canalMsgConsumer = canalMsgConsumer; + } + + private ClassLoader changeCL() { + ClassLoader cl = Thread.currentThread().getContextClassLoader(); + Thread.currentThread().setContextClassLoader(canalMsgConsumer.getClass().getClassLoader()); + return cl; + } + + private void revertCL(ClassLoader cl) { + Thread.currentThread().setContextClassLoader(cl); + } + + + @Override + public void init(Properties properties, String topic, String groupId) { + ClassLoader cl = changeCL(); + try { + canalMsgConsumer.init(properties, topic, groupId); + } finally { + revertCL(cl); + } + } + + @Override + public void connect() { + ClassLoader cl = changeCL(); + try { + canalMsgConsumer.connect(); + } finally { + revertCL(cl); + } + } + + @Override + public List getMessage(Long timeout, TimeUnit unit) { + ClassLoader cl = changeCL(); + try { + return canalMsgConsumer.getMessage(timeout, unit); + } finally { + revertCL(cl); + } + } + + @Override + public void ack() { + ClassLoader cl = changeCL(); + try { + canalMsgConsumer.ack(); + } finally { + revertCL(cl); + } + } + + @Override + public void rollback() { + ClassLoader cl = 
changeCL(); + try { + canalMsgConsumer.rollback(); + } finally { + revertCL(cl); + } + } + + @Override + public void disconnect() { + ClassLoader cl = changeCL(); + try { + canalMsgConsumer.disconnect(); + } finally { + revertCL(cl); + } + } +} diff --git a/connector/kafka-connector/pom.xml b/connector/kafka-connector/pom.xml index 8c8db9e946..2dfe68c21f 100644 --- a/connector/kafka-connector/pom.xml +++ b/connector/kafka-connector/pom.xml @@ -3,7 +3,7 @@ canal.connector com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml @@ -28,8 +28,8 @@ org.apache.kafka - kafka_2.11 - 1.1.1 + kafka-clients + ${kafka_version} junit @@ -89,4 +89,4 @@ - \ No newline at end of file + diff --git a/connector/kafka-connector/src/main/java/com/alibaba/otter/canal/connector/kafka/consumer/CanalKafkaConsumer.java b/connector/kafka-connector/src/main/java/com/alibaba/otter/canal/connector/kafka/consumer/CanalKafkaConsumer.java index 07be61d31c..ecd5ecfefb 100644 --- a/connector/kafka-connector/src/main/java/com/alibaba/otter/canal/connector/kafka/consumer/CanalKafkaConsumer.java +++ b/connector/kafka-connector/src/main/java/com/alibaba/otter/canal/connector/kafka/consumer/CanalKafkaConsumer.java @@ -16,7 +16,7 @@ import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.serialization.StringDeserializer; -import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson2.JSON; import com.alibaba.otter.canal.connector.core.config.CanalConstants; import com.alibaba.otter.canal.connector.core.consumer.CommonMessage; import com.alibaba.otter.canal.connector.core.spi.CanalMsgConsumer; diff --git a/connector/kafka-connector/src/main/java/com/alibaba/otter/canal/connector/kafka/producer/CanalKafkaProducer.java b/connector/kafka-connector/src/main/java/com/alibaba/otter/canal/connector/kafka/producer/CanalKafkaProducer.java index 3c30b96b45..9fdd342f58 100644 --- 
a/connector/kafka-connector/src/main/java/com/alibaba/otter/canal/connector/kafka/producer/CanalKafkaProducer.java +++ b/connector/kafka-connector/src/main/java/com/alibaba/otter/canal/connector/kafka/producer/CanalKafkaProducer.java @@ -18,8 +18,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter; import com.alibaba.otter.canal.common.utils.ExecutorTemplate; import com.alibaba.otter.canal.connector.core.producer.AbstractMQProducer; import com.alibaba.otter.canal.connector.core.producer.MQDestination; @@ -249,13 +249,13 @@ private List send(MQDestination mqDestination, String topicName, Message FlatMessage flatMessagePart = partitionFlatMessage[i]; if (flatMessagePart != null) { records.add(new ProducerRecord<>(topicName, i, null, JSON.toJSONBytes(flatMessagePart, - SerializerFeature.WriteMapNullValue))); + JSONWriter.Feature.WriteNulls))); } } } else { final int partition = mqDestination.getPartition() != null ? 
mqDestination.getPartition() : 0; records.add(new ProducerRecord<>(topicName, partition, null, JSON.toJSONBytes(flatMessage, - SerializerFeature.WriteMapNullValue))); + JSONWriter.Feature.WriteNulls))); } } } diff --git a/connector/pom.xml b/connector/pom.xml index db67d1504b..b6790c5981 100644 --- a/connector/pom.xml +++ b/connector/pom.xml @@ -4,13 +4,13 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter canal.connector - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT pom canal connector module for otter ${project.version} @@ -39,7 +39,7 @@ central - http://repo1.maven.org/maven2 + https://repo1.maven.org/maven2 true @@ -49,7 +49,7 @@ java.net - http://download.java.net/maven/2/ + https://download.java.net/maven/2/ true @@ -59,7 +59,7 @@ aliyun - http://maven.aliyun.com/nexus/content/groups/public/ + https://maven.aliyun.com/nexus/content/groups/public/ true diff --git a/connector/pulsarmq-connector/pom.xml b/connector/pulsarmq-connector/pom.xml index d1193944bd..ce76bc7392 100644 --- a/connector/pulsarmq-connector/pom.xml +++ b/connector/pulsarmq-connector/pom.xml @@ -1,11 +1,9 @@ - + canal.connector com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml 4.0.0 @@ -37,6 +35,12 @@ org.apache.pulsar pulsar-client + ${pulsar_version} + + + org.apache.pulsar + pulsar-client-admin + ${pulsar_version} diff --git a/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/config/PulsarMQConstants.java b/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/config/PulsarMQConstants.java index 19df91fe0c..1c9c1a8e66 100644 --- a/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/config/PulsarMQConstants.java +++ b/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/config/PulsarMQConstants.java @@ -1,7 +1,5 @@ package com.alibaba.otter.canal.connector.pulsarmq.config; -import org.joda.time.Seconds; - /** * 
PulsarMQ配置 * @@ -10,19 +8,20 @@ * @since 1 by chad at 2021/9/15 新增配置文件 */ public class PulsarMQConstants { - public static final String ROOT = "pulsarmq"; + + public static final String ROOT = "pulsarmq"; /** * pulsar服务连接地址 */ - public static final String PULSARMQ_SERVER_URL = ROOT + "." + "serverUrl"; + public static final String PULSARMQ_SERVER_URL = ROOT + "." + "serverUrl"; /** * pulsar服务角色token,需要有对应token的生产者权限 */ - public static final String PULSARMQ_ROLE_TOKEN = ROOT + "." + "roleToken"; + public static final String PULSARMQ_ROLE_TOKEN = ROOT + "." + "roleToken"; /** * topic前缀 */ - public static final String PULSARMQ_TOPIC_TENANT_PREFIX = ROOT + "." + "topicTenantPrefix"; + public static final String PULSARMQ_TOPIC_TENANT_PREFIX = ROOT + "." + "topicTenantPrefix"; /**** 消费者 *****/ /** @@ -32,31 +31,34 @@ public class PulsarMQConstants { /** * 批量处理超时时间 */ - public static final String PULSARMQ_BATCH_PROCESS_TIMEOUT = ROOT + "." + "batchProcessTimeout"; + public static final String PULSARMQ_BATCH_PROCESS_TIMEOUT = ROOT + "." + "batchProcessTimeout"; /** * 消费都订阅名称,将以该名称为消费者身份标识,同一个subscriptName,认为是同一个消费实例 */ - public static final String PULSARMQ_SUBSCRIPT_NAME = ROOT + "." + "subscriptName"; + public static final String PULSARMQ_SUBSCRIPT_NAME = ROOT + "." + "subscriptName"; /** * 重试间隔秒数 */ - public static final String PULSARMQ_REDELIVERY_DELAY_SECONDS = ROOT + "." + "redeliveryDelaySeconds"; + public static final String PULSARMQ_REDELIVERY_DELAY_SECONDS = ROOT + "." + "redeliveryDelaySeconds"; /** * ACK超时秒数 */ - public static final String PULSARMQ_ACK_TIMEOUT_SECONDS = ROOT + "." + "ackTimeoutSeconds"; + public static final String PULSARMQ_ACK_TIMEOUT_SECONDS = ROOT + "." + "ackTimeoutSeconds"; /** * 是否开启消费重试 */ - public static final String PULSARMQ_IS_RETRY = ROOT + "." + "isRetry"; + public static final String PULSARMQ_IS_RETRY = ROOT + "." 
+ "isRetry"; /** * 自动生成的 retry dlq队列名称后缀是否大写 */ - public static final String PULSARMQ_IS_RETRY_DLQ_UPPERCASE = ROOT + "." + "isRetryDLQUpperCase"; + public static final String PULSARMQ_IS_RETRY_DLQ_UPPERCASE = ROOT + "." + "isRetryDLQUpperCase"; /** * 最大重试次数 */ - public static final String PULSARMQ_MAX_REDELIVERY_COUNT = ROOT + "." + "maxRedeliveryCount"; - + public static final String PULSARMQ_MAX_REDELIVERY_COUNT = ROOT + "." + "maxRedeliveryCount"; + /** + * Pulsar admin服务器地址 + */ + public static final String PULSARMQ_ADMIN_SERVER_URL = ROOT + "." + "adminServerUrl"; } diff --git a/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/config/PulsarMQProducerConfig.java b/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/config/PulsarMQProducerConfig.java index c7fc9c9aea..1ace4aa0d2 100644 --- a/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/config/PulsarMQProducerConfig.java +++ b/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/config/PulsarMQProducerConfig.java @@ -4,16 +4,18 @@ /** * Pulsar生产者配置 + * * @author chad * @date 2021/9/15 11:23 * @since 1 by chad at 2021/9/15 新增 */ public class PulsarMQProducerConfig extends MQProperties { + /** * pulsar服务连接地址 *

- * broker: pulsar://localhost:6650
- * httpUrl: http://localhost:8080 + * broker: pulsar://localhost:6650
+ * httpUrl: http://localhost:8080 *

*/ private String serverUrl; @@ -30,6 +32,10 @@ public class PulsarMQProducerConfig extends MQProperties { * 生产者角色权限,请确保该角色有canal使用的所有topic生产者权限(最低要求) */ private String roleToken; + /** + * admin服务器地址 + */ + private String adminServerUrl; public String getServerUrl() { return serverUrl; @@ -54,4 +60,12 @@ public String getTopicTenantPrefix() { public void setTopicTenantPrefix(String topicTenantPrefix) { this.topicTenantPrefix = topicTenantPrefix; } + + public String getAdminServerUrl() { + return adminServerUrl; + } + + public void setAdminServerUrl(String adminServerUrl) { + this.adminServerUrl = adminServerUrl; + } } diff --git a/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/consumer/CanalPulsarMQConsumer.java b/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/consumer/CanalPulsarMQConsumer.java index fc9369a109..0a535468cb 100644 --- a/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/consumer/CanalPulsarMQConsumer.java +++ b/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/consumer/CanalPulsarMQConsumer.java @@ -1,6 +1,13 @@ package com.alibaba.otter.canal.connector.pulsarmq.consumer; -import com.alibaba.fastjson.JSON; +import java.util.List; +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.lang.StringUtils; +import org.apache.pulsar.client.api.*; + +import com.alibaba.fastjson2.JSON; import com.alibaba.otter.canal.common.utils.MQUtil; import com.alibaba.otter.canal.connector.core.config.CanalConstants; import com.alibaba.otter.canal.connector.core.consumer.CommonMessage; @@ -12,15 +19,6 @@ import com.alibaba.otter.canal.protocol.Message; import com.alibaba.otter.canal.protocol.exception.CanalClientException; import com.google.common.collect.Lists; -import org.apache.commons.lang.StringUtils; -import org.apache.pulsar.client.api.*; - -import java.time.LocalDateTime; 
-import java.time.format.DateTimeFormatter; -import java.util.List; -import java.util.Properties; -import java.util.UUID; -import java.util.concurrent.TimeUnit; /** * Pulsar consumer SPI 实现 @@ -30,19 +28,20 @@ */ @SPI("pulsarmq") public class CanalPulsarMQConsumer implements CanalMsgConsumer { + /** * 连接pulsar客户端 */ - private PulsarClient pulsarClient; - private Consumer pulsarMQConsumer; + private PulsarClient pulsarClient; + private Consumer pulsarMQConsumer; /** * 是否为扁平消息 */ - private boolean flatMessage = false; + private boolean flatMessage = false; /** * 主题名称 */ - private String topic; + private String topic; /** * 单线程控制 */ @@ -50,58 +49,57 @@ public class CanalPulsarMQConsumer implements CanalMsgConsumer { /** * 环境连接URL */ - private String serviceUrl; + private String serviceUrl; /** * 角色认证token */ - private String roleToken; + private String roleToken; /** * 订阅客户端名称 */ - private String subscriptName; + private String subscriptName; /** * 每次批量获取数据的最大条目数,默认30 */ - private int batchSize = 30; + private int batchSize = 30; /** - * 与{@code batchSize}一起决定批量获取的数据大小 - * 当: + * 与{@code batchSize}一起决定批量获取的数据大小 当: *

* 1. {@code batchSize} 条消息未消费时
* 2. 距上一次批量消费时间达到{@code batchTimeoutSeconds}秒时 *

* 任一条件满足,即执行批量消费 */ - private int getBatchTimeoutSeconds = 30; + private int getBatchTimeoutSeconds = 30; /** * 批量处理消息时,一次批量处理的超时时间 *

* 该时间应该根据{@code batchSize}和{@code batchTimeoutSeconds}合理设置 *

*/ - private long batchProcessTimeout = 60 * 1000; + private long batchProcessTimeout = 60 * 1000; /** * 消费失败后的重试秒数,默认60秒 */ - private int redeliveryDelaySeconds = 60; + private int redeliveryDelaySeconds = 60; /** * 当客户端接收到消息,30秒还没有返回ack给服务端时,ack超时,会重新消费该消息 */ - private int ackTimeoutSeconds = 30; + private int ackTimeoutSeconds = 30; /** * 是否开启消息失败重试功能,默认开启 */ - private boolean isRetry = true; + private boolean isRetry = true; /** *

* true重试(-RETRY)和死信队列(-DLQ)后缀为大写,有些地方创建的为小写,需确保正确 *

*/ - private boolean isRetryDLQUpperCase = false; + private boolean isRetryDLQUpperCase = false; /** * 最大重试次数 */ - private int maxRedeliveryCount = 128; + private int maxRedeliveryCount = 128; @Override public void init(Properties properties, String topic, String groupId) { @@ -113,6 +111,11 @@ public void init(Properties properties, String topic, String groupId) { this.serviceUrl = properties.getProperty(PulsarMQConstants.PULSARMQ_SERVER_URL); this.roleToken = properties.getProperty(PulsarMQConstants.PULSARMQ_ROLE_TOKEN); this.subscriptName = properties.getProperty(PulsarMQConstants.PULSARMQ_SUBSCRIPT_NAME); + // 采用groupId作为subscriptName,避免所有的都是同一个订阅者名称 + if (StringUtils.isEmpty(this.subscriptName)) { + this.subscriptName = groupId; + } + if (StringUtils.isEmpty(this.subscriptName)) { throw new RuntimeException("Pulsar Consumer subscriptName required"); } @@ -157,10 +160,12 @@ public void connect() { } // 连接创建客户端 try { - pulsarClient = PulsarClient.builder() - .serviceUrl(serviceUrl) - .authentication(AuthenticationFactory.token(roleToken)) - .build(); + // AuthenticationDataProvider + ClientBuilder builder = PulsarClient.builder().serviceUrl(serviceUrl); + if (StringUtils.isNotEmpty(roleToken)) { + builder.authentication(AuthenticationFactory.token(roleToken)); + } + pulsarClient = builder.build(); } catch (PulsarClientException e) { throw new RuntimeException(e); } @@ -176,15 +181,14 @@ public void connect() { builder.subscriptionType(SubscriptionType.Failover); builder - // 调用consumer.negativeAcknowledge(message) (即nack)来表示消费失败的消息 - // 在指定的时间进行重新消费,默认是1分钟。 - .negativeAckRedeliveryDelay(this.redeliveryDelaySeconds, TimeUnit.SECONDS) - .subscriptionName(this.subscriptName) - ; + // 调用consumer.negativeAcknowledge(message) (即nack)来表示消费失败的消息 + // 在指定的时间进行重新消费,默认是1分钟。 + .negativeAckRedeliveryDelay(this.redeliveryDelaySeconds, TimeUnit.SECONDS) + .subscriptionName(this.subscriptName); if (this.isRetry) { DeadLetterPolicy.DeadLetterPolicyBuilder dlqBuilder = 
DeadLetterPolicy.builder() - // 最大重试次数 - .maxRedeliverCount(this.maxRedeliveryCount); + // 最大重试次数 + .maxRedeliverCount(this.maxRedeliveryCount); // 指定重试队列,不是多个或通配符topic才能判断重试队列 if (!MQUtil.isPatternTag(this.topic)) { String retryTopic = this.topic + (this.isRetryDLQUpperCase ? "-RETRY" : "-retry"); @@ -193,19 +197,17 @@ public void connect() { dlqBuilder.deadLetterTopic(dlqTopic); } - //默认关闭,如果需要重试则开启 - builder.enableRetry(true) - .deadLetterPolicy(dlqBuilder.build()); + // 默认关闭,如果需要重试则开启 + builder.enableRetry(true).deadLetterPolicy(dlqBuilder.build()); } // ack超时 builder.ackTimeout(this.ackTimeoutSeconds, TimeUnit.SECONDS); // pulsar批量获取消息设置 - builder.batchReceivePolicy(new BatchReceivePolicy.Builder() - .maxNumMessages(this.batchSize) - .timeout(this.getBatchTimeoutSeconds, TimeUnit.SECONDS) - .build()); + builder.batchReceivePolicy(new BatchReceivePolicy.Builder().maxNumMessages(this.batchSize) + .timeout(this.getBatchTimeoutSeconds, TimeUnit.SECONDS) + .build()); try { this.pulsarMQConsumer = builder.subscribe(); @@ -220,7 +222,6 @@ public List getMessage(Long timeout, TimeUnit unit) { List messageList = Lists.newArrayList(); try { Messages messages = pulsarMQConsumer.batchReceive(); - if (null == messages || messages.size() == 0) { return messageList; } @@ -278,7 +279,8 @@ public void disconnect() { return; } try { - this.pulsarMQConsumer.unsubscribe(); + // 会导致暂停期间数据丢失 + // this.pulsarMQConsumer.unsubscribe(); this.pulsarClient.close(); } catch (PulsarClientException e) { throw new CanalClientException("Disconnect pulsar consumer error", e); diff --git a/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/producer/CanalPulsarMQProducer.java b/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/producer/CanalPulsarMQProducer.java index 4c2c8f38b5..1b48893546 100644 --- a/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/producer/CanalPulsarMQProducer.java 
+++ b/connector/pulsarmq-connector/src/main/java/com/alibaba/otter/canal/connector/pulsarmq/producer/CanalPulsarMQProducer.java @@ -1,7 +1,19 @@ package com.alibaba.otter.canal.connector.pulsarmq.producer; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; +import java.util.*; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.lang.StringUtils; +import org.apache.pulsar.client.admin.PulsarAdmin; +import org.apache.pulsar.client.admin.PulsarAdminException; +import org.apache.pulsar.client.api.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter.Feature; import com.alibaba.otter.canal.common.utils.ExecutorTemplate; import com.alibaba.otter.canal.common.utils.NamedThreadFactory; import com.alibaba.otter.canal.common.utils.PropertiesUtils; @@ -16,17 +28,6 @@ import com.alibaba.otter.canal.connector.pulsarmq.config.PulsarMQProducerConfig; import com.alibaba.otter.canal.protocol.CanalEntry; import com.alibaba.otter.canal.protocol.FlatMessage; -import org.apache.commons.lang.StringUtils; -import org.apache.pulsar.client.api.*; -import org.apache.pulsar.shade.com.google.gson.JsonParser; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.*; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; /** * PulsarMQ Producer SPI 实现 @@ -37,19 +38,22 @@ @SPI("pulsarmq") public class CanalPulsarMQProducer extends AbstractMQProducer implements CanalMQProducer { - private static final Logger logger = LoggerFactory.getLogger(CanalPulsarMQProducer.class); - - private static final Map> PRODUCERS = new HashMap<>(); - - protected ThreadPoolExecutor sendPartitionExecutor; /** * 消息体分区属性名称 */ - public static 
final String MSG_PROPERTY_PARTITION_NAME = "partitionNum"; + public static final String MSG_PROPERTY_PARTITION_NAME = "partitionNum"; + private static final Logger logger = LoggerFactory + .getLogger(CanalPulsarMQProducer.class); + private static final Map> PRODUCERS = new HashMap<>(); + protected ThreadPoolExecutor sendPartitionExecutor; /** * pulsar客户端,管理连接 */ - protected PulsarClient client; + protected PulsarClient client; + /** + * Pulsar admin 客户端 + */ + protected PulsarAdmin pulsarAdmin; @Override public void init(Properties properties) { @@ -61,25 +65,36 @@ public void init(Properties properties) { // 初始化连接客户端 try { - client = PulsarClient.builder() - // 填写pulsar的连接地址 - .serviceUrl(pulsarMQProducerConfig.getServerUrl()) - // 角色权限认证的token - .authentication(AuthenticationFactory.token(pulsarMQProducerConfig.getRoleToken())) - .build(); + ClientBuilder builder = PulsarClient.builder() + // 填写pulsar的连接地址 + .serviceUrl(pulsarMQProducerConfig.getServerUrl()); + if (StringUtils.isNotEmpty(pulsarMQProducerConfig.getRoleToken())) { + // 角色权限认证的token + builder.authentication(AuthenticationFactory.token(pulsarMQProducerConfig.getRoleToken())); + } + client = builder.build(); } catch (PulsarClientException e) { throw new RuntimeException(e); } - // 加载所有生产者 --> topic可能有正则或表名,无法确认所有topic,在使用时再加载 + // 初始化Pulsar admin + if (StringUtils.isNotEmpty(pulsarMQProducerConfig.getAdminServerUrl())) { + try { + pulsarAdmin = PulsarAdmin.builder().serviceHttpUrl(pulsarMQProducerConfig.getAdminServerUrl()).build(); + } catch (PulsarClientException e) { + throw new RuntimeException(e); + } + } + + // 加载所有生产者 --> topic可能有正则或表名,无法确认所有topic,在使用时再加载 int parallelPartitionSendThreadSize = mqProperties.getParallelSendThreadSize(); sendPartitionExecutor = new ThreadPoolExecutor(parallelPartitionSendThreadSize, - parallelPartitionSendThreadSize, - 0, - TimeUnit.SECONDS, - new ArrayBlockingQueue<>(parallelPartitionSendThreadSize * 2), - new NamedThreadFactory("MQ-Parallel-Sender-Partition"), - 
new ThreadPoolExecutor.CallerRunsPolicy()); + parallelPartitionSendThreadSize, + 0, + TimeUnit.SECONDS, + new ArrayBlockingQueue<>(parallelPartitionSendThreadSize * 2), + new NamedThreadFactory("MQ-Parallel-Sender-Partition"), + new ThreadPoolExecutor.CallerRunsPolicy()); } /** @@ -102,10 +117,15 @@ private void loadPulsarMQProperties(Properties properties) { if (!StringUtils.isEmpty(roleToken)) { tmpProperties.setRoleToken(roleToken); } - String topicTenantPrefix = PropertiesUtils.getProperty(properties, PulsarMQConstants.PULSARMQ_TOPIC_TENANT_PREFIX); + String topicTenantPrefix = PropertiesUtils.getProperty(properties, + PulsarMQConstants.PULSARMQ_TOPIC_TENANT_PREFIX); if (!StringUtils.isEmpty(topicTenantPrefix)) { tmpProperties.setTopicTenantPrefix(topicTenantPrefix); } + String adminServerUrl = PropertiesUtils.getProperty(properties, PulsarMQConstants.PULSARMQ_ADMIN_SERVER_URL); + if (!StringUtils.isEmpty(adminServerUrl)) { + tmpProperties.setAdminServerUrl(adminServerUrl); + } if (logger.isDebugEnabled()) { logger.debug("Load pulsar properties ==> {}", JSON.toJSON(this.mqProperties)); } @@ -119,8 +139,8 @@ private void loadPulsarMQProperties(Properties properties) { *

* * @param destination 消息目标信息 - * @param message 消息 - * @param callback 消息发送结果回调 + * @param message 消息 + * @param callback 消息发送结果回调 * @return void * @date 2021/9/2 22:01 * @author chad @@ -133,9 +153,8 @@ public void send(MQDestination destination, com.alibaba.otter.canal.protocol.Mes try { if (!StringUtils.isEmpty(destination.getDynamicTopic())) { // 动态topic - Map messageMap = MQMessageUtils.messageTopics(message, - destination.getTopic(), - destination.getDynamicTopic()); + Map messageMap = MQMessageUtils + .messageTopics(message, destination.getTopic(), destination.getDynamicTopic()); for (Map.Entry entry : messageMap.entrySet()) { String topicName = entry.getKey().replace('.', '_'); @@ -174,14 +193,20 @@ public void send(MQDestination destination, com.alibaba.otter.canal.protocol.Mes * @author chad * @since 1.0.0 by chad at 2021/9/2: 新增 */ - public void send(final MQDestination destination, String topicName, com.alibaba.otter.canal.protocol.Message message) { + public void send(final MQDestination destination, String topicName, + com.alibaba.otter.canal.protocol.Message message) { // 获取当前topic的分区数 Integer partitionNum = MQMessageUtils.parseDynamicTopicPartition(topicName, - destination.getDynamicTopicPartitionNum()); + destination.getDynamicTopicPartitionNum()); if (partitionNum == null) { partitionNum = destination.getPartitionsNum(); } + // 创建多分区topic + if (pulsarAdmin != null && partitionNum != null && partitionNum > 0 && PRODUCERS.get(topicName) == null) { + createMultipleTopic(topicName, partitionNum); + } + ExecutorTemplate template = new ExecutorTemplate(sendPartitionExecutor); // 并发构造 MQMessageUtils.EntryRowData[] datas = MQMessageUtils.buildMessageData(message, buildExecutor); @@ -195,10 +220,10 @@ public void send(final MQDestination destination, String topicName, com.alibaba. 
} // 串行分区 com.alibaba.otter.canal.protocol.Message[] messages = MQMessageUtils.messagePartition(datas, - message.getId(), - partitionNum, - destination.getPartitionHash(), - mqProperties.isDatabaseHash()); + message.getId(), + partitionNum, + destination.getPartitionHash(), + mqProperties.isDatabaseHash()); // 发送 int len = messages.length; for (int i = 0; i < len; i++) { @@ -228,9 +253,9 @@ public void send(final MQDestination destination, String topicName, com.alibaba. for (FlatMessage flatMessage : flatMessages) { FlatMessage[] partitionFlatMessage = MQMessageUtils.messagePartition(flatMessage, - partitionNum, - destination.getPartitionHash(), - mqProperties.isDatabaseHash()); + partitionNum, + destination.getPartitionHash(), + mqProperties.isDatabaseHash()); int length = partitionFlatMessage.length; for (int i = 0; i < length; i++) { // 增加null判断,issue #3267 @@ -264,9 +289,9 @@ public void send(final MQDestination destination, String topicName, com.alibaba. /** * 发送原始消息,需要做分区处理 * - * @param topic topic + * @param topic topic * @param partitionNum 目标分区 - * @param msg 原始消息内容 + * @param msg 原始消息内容 * @return void * @date 2021/9/10 17:55 * @author chad @@ -277,8 +302,9 @@ private void sendMessage(String topic, int partitionNum, com.alibaba.otter.canal byte[] msgBytes = CanalMessageSerializerUtil.serializer(msg, mqProperties.isFilterTransactionEntry()); try { MessageId msgResultId = producer.newMessage() - .property(MSG_PROPERTY_PARTITION_NAME, String.valueOf(partitionNum)) - .value(msgBytes).send(); + .property(MSG_PROPERTY_PARTITION_NAME, String.valueOf(partitionNum)) + .value(msgBytes) + .send(); // todo 判断发送结果 if (logger.isDebugEnabled()) { logger.debug("Send Message to topic:{} Result: {}", topic, msgResultId); @@ -291,7 +317,7 @@ private void sendMessage(String topic, int partitionNum, com.alibaba.otter.canal /** * 发送扁平消息 * - * @param topic topic主题 + * @param topic topic主题 * @param flatMessages 扁平消息 * @return void * @date 2021/9/10 18:22 @@ -302,13 +328,12 @@ 
private void sendMessage(String topic, int partition, List flatMess Producer producer = getProducer(topic); for (FlatMessage f : flatMessages) { try { - MessageId msgResultId = producer - .newMessage() - .property(MSG_PROPERTY_PARTITION_NAME, String.valueOf(partition)) - .value(JSON.toJSONBytes(f, SerializerFeature.WriteMapNullValue)) - .send() - // - ; + MessageId msgResultId = producer.newMessage() + .property(MSG_PROPERTY_PARTITION_NAME, String.valueOf(partition)) + .value(JSON.toJSONBytes(f, Feature.WriteNulls)) + .send() + // + ; if (logger.isDebugEnabled()) { logger.debug("Send Messages to topic:{} Result: {}", topic, msgResultId); } @@ -319,22 +344,44 @@ private void sendMessage(String topic, int partition, List flatMess } /** - * 获取指定topic的生产者,并且使用缓存 - * + * 创建多分区topic + * * @param topic - * @return org.apache.pulsar.client.api.Producer - * @date 2021/9/10 11:21 - * @author chad - * @since 1 by chad at 2021/9/10 新增 + * @param partitionNum + */ + private void createMultipleTopic(String topic, Integer partitionNum) { + // 拼接topic前缀 + PulsarMQProducerConfig pulsarMQProperties = (PulsarMQProducerConfig) this.mqProperties; + String prefix = pulsarMQProperties.getTopicTenantPrefix(); + String fullTopic = topic; + if (!StringUtils.isEmpty(prefix)) { + if (!prefix.endsWith("/")) { + fullTopic = "/" + fullTopic; + } + fullTopic = pulsarMQProperties.getTopicTenantPrefix() + fullTopic; + } + + // 创建分区topic + try { + pulsarAdmin.topics().createPartitionedTopic(fullTopic, partitionNum); + } catch (PulsarAdminException e) { + // TODO 无论是否报错,都继续后续的操作,此处不进行阻塞 + } + } + + /** + * 获取topic + * + * @param topic + * @return */ private Producer getProducer(String topic) { Producer producer = PRODUCERS.get(topic); - - if (null == producer) { + if (null == producer || !producer.isConnected()) { try { synchronized (PRODUCERS) { producer = PRODUCERS.get(topic); - if (null != producer) { + if (null != producer && producer.isConnected()) { return producer; } @@ -351,10 +398,10 @@ 
private Producer getProducer(String topic) { // 创建指定topic的生产者 producer = client.newProducer() - .topic(fullTopic) - // 指定路由器 - .messageRouter(new MessageRouterImpl(topic)) - .create(); + .topic(fullTopic) + // 指定路由器 + .messageRouter(new MessageRouterImpl(topic)) + .create(); // 放入缓存 PRODUCERS.put(topic, producer); } @@ -367,6 +414,26 @@ private Producer getProducer(String topic) { return producer; } + @Override + public void stop() { + logger.info("## Stop PulsarMQ producer##"); + + for (Producer p : PRODUCERS.values()) { + try { + if (null != p && p.isConnected()) { + p.close(); + } + } catch (PulsarClientException e) { + logger.warn("close producer name: {}, topic: {}, error: {}", + p.getProducerName(), + p.getTopic(), + e.getMessage()); + } + } + + super.stop(); + } + /** * Pulsar自定义路由策略 * @@ -376,9 +443,10 @@ private Producer getProducer(String topic) { * @since 2 by chad at 2021/9/17 修改为msg自带目标分区 */ private static class MessageRouterImpl implements MessageRouter { + private String topicLocal; - public MessageRouterImpl(String topicLocal) { + public MessageRouterImpl(String topicLocal){ this.topicLocal = topicLocal; } @@ -390,7 +458,8 @@ public int choosePartition(Message msg, TopicMetadata metadata) { try { partition = Integer.parseInt(partitionStr); } catch (NumberFormatException e) { - logger.warn("Parse msg {} property failed for value: {}", MSG_PROPERTY_PARTITION_NAME, partitionStr); + logger + .warn("Parse msg {} property failed for value: {}", MSG_PROPERTY_PARTITION_NAME, partitionStr); } } // topic创建时设置的分区数 @@ -402,21 +471,4 @@ public int choosePartition(Message msg, TopicMetadata metadata) { return partition; } } - - @Override - public void stop() { - logger.info("## Stop RocketMQ producer##"); - - for (Producer p : PRODUCERS.values()) { - try { - if (null != p && p.isConnected()) { - p.close(); - } - } catch (PulsarClientException e) { - logger.warn("close producer name: {}, topic: {}, error: {}", p.getProducerName(), p.getTopic(), e.getMessage()); - 
} - } - - super.stop(); - } } diff --git a/connector/pulsarmq-connector/src/main/resources/META-INF/canal/com.alibaba.otter.canal.connector.core.spi.CanalMsgConsumer b/connector/pulsarmq-connector/src/main/resources/META-INF/canal/com.alibaba.otter.canal.connector.core.spi.CanalMsgConsumer new file mode 100644 index 0000000000..54cdb8aab6 --- /dev/null +++ b/connector/pulsarmq-connector/src/main/resources/META-INF/canal/com.alibaba.otter.canal.connector.core.spi.CanalMsgConsumer @@ -0,0 +1 @@ +pulsarmq=com.alibaba.otter.canal.connector.pulsarmq.consumer.CanalPulsarMQConsumer \ No newline at end of file diff --git a/connector/pulsarmq-connector/src/test/java/com/alibaba/otter/canal/connector/pulsarmq/consumer/CanalPulsarMQConsumerTest.java b/connector/pulsarmq-connector/src/test/java/com/alibaba/otter/canal/connector/pulsarmq/consumer/CanalPulsarMQConsumerTest.java index 463d0bde13..b1a8b2ced3 100644 --- a/connector/pulsarmq-connector/src/test/java/com/alibaba/otter/canal/connector/pulsarmq/consumer/CanalPulsarMQConsumerTest.java +++ b/connector/pulsarmq-connector/src/test/java/com/alibaba/otter/canal/connector/pulsarmq/consumer/CanalPulsarMQConsumerTest.java @@ -1,23 +1,19 @@ package com.alibaba.otter.canal.connector.pulsarmq.consumer; -import com.alibaba.fastjson.JSON; -import com.alibaba.otter.canal.connector.core.config.CanalConstants; -import com.alibaba.otter.canal.connector.core.consumer.CommonMessage; -import com.alibaba.otter.canal.connector.pulsarmq.config.PulsarMQConstants; -import org.apache.commons.lang.StringUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.time.LocalDateTime; import java.time.LocalTime; import java.time.format.DateTimeFormatter; import java.util.List; import java.util.Properties; -import java.util.UUID; import java.util.concurrent.TimeUnit; -import static org.junit.Assert.*; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import com.alibaba.fastjson2.JSON; 
+import com.alibaba.otter.canal.connector.core.config.CanalConstants; +import com.alibaba.otter.canal.connector.core.consumer.CommonMessage; +import com.alibaba.otter.canal.connector.pulsarmq.config.PulsarMQConstants; /** * @author chad @@ -26,7 +22,7 @@ */ public class CanalPulsarMQConsumerTest { - private Properties properties; + private Properties properties; private CanalPulsarMQConsumer consumer; @@ -73,4 +69,4 @@ public void getMessage() { consumer.ack(); } } -} \ No newline at end of file +} diff --git a/connector/rabbitmq-connector/pom.xml b/connector/rabbitmq-connector/pom.xml index f7fff00fe9..7255f0ec57 100644 --- a/connector/rabbitmq-connector/pom.xml +++ b/connector/rabbitmq-connector/pom.xml @@ -3,7 +3,7 @@ canal.connector com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml @@ -29,12 +29,12 @@ com.rabbitmq amqp-client - 5.5.0 + ${rabbitmq_version} com.alibaba.mq-amqp mq-amqp-client - 1.0.3 + ${mq_amqp_client} junit @@ -94,4 +94,4 @@ -
\ No newline at end of file +
diff --git a/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/config/RabbitMQConstants.java b/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/config/RabbitMQConstants.java index f6b6f8e633..8a2c797de5 100644 --- a/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/config/RabbitMQConstants.java +++ b/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/config/RabbitMQConstants.java @@ -15,6 +15,9 @@ public class RabbitMQConstants { public static final String RABBITMQ_VIRTUAL_HOST = ROOT + "." + "virtual.host"; public static final String RABBITMQ_USERNAME = ROOT + "." + "username"; public static final String RABBITMQ_PASSWORD = ROOT + "." + "password"; + public static final String RABBITMQ_QUEUE = ROOT + "." + "queue"; + public static final String RABBITMQ_ROUTING_KEY = ROOT + "." + "routingKey"; + public static final String RABBITMQ_DELIVERY_MODE = ROOT + "." + "deliveryMode"; public static final String RABBITMQ_RESOURCE_OWNERID = ROOT + "." 
+ "rabbitmq.resource.ownerId"; } diff --git a/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/config/RabbitMQProducerConfig.java b/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/config/RabbitMQProducerConfig.java index 57a9d12d8d..8dcde5cabd 100644 --- a/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/config/RabbitMQProducerConfig.java +++ b/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/config/RabbitMQProducerConfig.java @@ -15,6 +15,9 @@ public class RabbitMQProducerConfig extends MQProperties { private String exchange; private String username; private String password; + private String queue; + private String routingKey; + private String deliveryMode; public String getHost() { return host; @@ -55,4 +58,28 @@ public String getPassword() { public void setPassword(String password) { this.password = password; } + + public String getQueue() { + return queue; + } + + public void setQueue(String queue) { + this.queue = queue; + } + + public String getRoutingKey() { + return routingKey; + } + + public void setRoutingKey(String routingKey) { + this.routingKey = routingKey; + } + + public String getDeliveryMode() { + return deliveryMode; + } + + public void setDeliveryMode(String deliveryMode) { + this.deliveryMode = deliveryMode; + } } diff --git a/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/consumer/CanalRabbitMQConsumer.java b/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/consumer/CanalRabbitMQConsumer.java index ebd3fa03d6..5b7f774a5f 100644 --- a/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/consumer/CanalRabbitMQConsumer.java +++ b/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/consumer/CanalRabbitMQConsumer.java @@ -9,11 +9,11 @@ import 
java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import com.alibaba.otter.canal.common.utils.PropertiesUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson2.JSON; +import com.alibaba.otter.canal.common.utils.PropertiesUtils; import com.alibaba.otter.canal.connector.core.config.CanalConstants; import com.alibaba.otter.canal.connector.core.consumer.CommonMessage; import com.alibaba.otter.canal.connector.core.spi.CanalMsgConsumer; @@ -24,13 +24,7 @@ import com.alibaba.otter.canal.connector.rabbitmq.producer.AliyunCredentialsProvider; import com.alibaba.otter.canal.protocol.Message; import com.alibaba.otter.canal.protocol.exception.CanalClientException; -import com.rabbitmq.client.AMQP; -import com.rabbitmq.client.Channel; -import com.rabbitmq.client.Connection; -import com.rabbitmq.client.ConnectionFactory; -import com.rabbitmq.client.Consumer; -import com.rabbitmq.client.DefaultConsumer; -import com.rabbitmq.client.Envelope; +import com.rabbitmq.client.*; /** * RabbitMQ consumer SPI 实现 @@ -41,7 +35,8 @@ @SPI("rabbitmq") public class CanalRabbitMQConsumer implements CanalMsgConsumer { - private static final Logger logger = LoggerFactory.getLogger(CanalRabbitMQConsumer.class); + private static final Logger logger = LoggerFactory + .getLogger(CanalRabbitMQConsumer.class); // 链接地址 private String nameServer; @@ -91,7 +86,7 @@ public void connect() { factory.setUsername(username); factory.setPassword(password); } - //解析出端口 modified by 16075140 + // 解析出端口 modified by 16075140 if (nameServer != null && nameServer.contains(":")) { String[] serverHostAndPort = nameServer.split(":"); factory.setHost(serverHostAndPort[0]); diff --git a/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/producer/CanalRabbitMQProducer.java b/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/producer/CanalRabbitMQProducer.java 
index a99057465a..a75434d44f 100644 --- a/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/producer/CanalRabbitMQProducer.java +++ b/connector/rabbitmq-connector/src/main/java/com/alibaba/otter/canal/connector/rabbitmq/producer/CanalRabbitMQProducer.java @@ -10,8 +10,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter; import com.alibaba.otter.canal.common.CanalException; import com.alibaba.otter.canal.common.utils.ExecutorTemplate; import com.alibaba.otter.canal.common.utils.PropertiesUtils; @@ -26,10 +26,7 @@ import com.alibaba.otter.canal.connector.rabbitmq.config.RabbitMQProducerConfig; import com.alibaba.otter.canal.protocol.FlatMessage; import com.alibaba.otter.canal.protocol.Message; -import com.rabbitmq.client.AlreadyClosedException; -import com.rabbitmq.client.Channel; -import com.rabbitmq.client.Connection; -import com.rabbitmq.client.ConnectionFactory; +import com.rabbitmq.client.*; /** * RabbitMQ Producer SPI 实现 @@ -75,7 +72,13 @@ public void init(Properties properties) { try { connect = factory.newConnection(); channel = connect.createChannel(); - // channel.exchangeDeclare(mqProperties.getExchange(), "topic"); + channel.queueDeclare(rabbitMQProperties.getQueue(), true, false, false, null); + channel.exchangeDeclare(rabbitMQProperties + .getExchange(), rabbitMQProperties.getDeliveryMode(), true, false, false, null); + channel.queueBind(rabbitMQProperties.getQueue(), + rabbitMQProperties.getExchange(), + rabbitMQProperties.getRoutingKey()); + } catch (IOException | TimeoutException ex) { throw new CanalException("Start RabbitMQ producer error", ex); } @@ -106,6 +109,18 @@ private void loadRabbitMQProperties(Properties properties) { if (!StringUtils.isEmpty(password)) { rabbitMQProperties.setPassword(password); } + String queue = 
PropertiesUtils.getProperty(properties, RabbitMQConstants.RABBITMQ_QUEUE); + if (!StringUtils.isEmpty(queue)) { + rabbitMQProperties.setQueue(queue); + } + String routingKey = PropertiesUtils.getProperty(properties, RabbitMQConstants.RABBITMQ_ROUTING_KEY); + if (!StringUtils.isEmpty(routingKey)) { + rabbitMQProperties.setRoutingKey(routingKey); + } + String deliveryMode = PropertiesUtils.getProperty(properties, RabbitMQConstants.RABBITMQ_DELIVERY_MODE); + if (!StringUtils.isEmpty(deliveryMode)) { + rabbitMQProperties.setDeliveryMode(deliveryMode); + } } @Override @@ -114,9 +129,8 @@ public void send(final MQDestination destination, Message message, Callback call try { if (!StringUtils.isEmpty(destination.getDynamicTopic())) { // 动态topic - Map messageMap = MQMessageUtils.messageTopics(message, - destination.getTopic(), - destination.getDynamicTopic()); + Map messageMap = MQMessageUtils + .messageTopics(message, destination.getTopic(), destination.getDynamicTopic()); for (Map.Entry entry : messageMap.entrySet()) { final String topicName = entry.getKey().replace('.', '_'); @@ -151,7 +165,7 @@ private void send(MQDestination canalDestination, String topicName, Message mess // 串行分区 List flatMessages = MQMessageUtils.messageConverter(datas, messageSub.getId()); for (FlatMessage flatMessage : flatMessages) { - byte[] message = JSON.toJSONBytes(flatMessage, SerializerFeature.WriteMapNullValue); + byte[] message = JSON.toJSONBytes(flatMessage, JSONWriter.Feature.WriteNulls); if (logger.isDebugEnabled()) { logger.debug("send message:{} to destination:{}", message, canalDestination.getCanalDestination()); } @@ -165,7 +179,10 @@ private void sendMessage(String queueName, byte[] message) { // tips: 目前逻辑中暂不处理对exchange处理,请在Console后台绑定 才可使用routekey try { RabbitMQProducerConfig rabbitMQProperties = (RabbitMQProducerConfig) this.mqProperties; - channel.basicPublish(rabbitMQProperties.getExchange(), queueName, null, message); + channel.basicPublish(rabbitMQProperties.getExchange(), + 
queueName, + MessageProperties.PERSISTENT_TEXT_PLAIN, + message); } catch (Throwable e) { throw new RuntimeException(e); } diff --git a/connector/rocketmq-connector/pom.xml b/connector/rocketmq-connector/pom.xml index b2b140c29c..82388e3279 100644 --- a/connector/rocketmq-connector/pom.xml +++ b/connector/rocketmq-connector/pom.xml @@ -3,7 +3,7 @@ canal.connector com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml @@ -13,10 +13,6 @@ jar canal connector rocketMQ module for otter ${project.version} - - 4.8.0 - - com.alibaba.otter @@ -99,4 +95,4 @@ - \ No newline at end of file + diff --git a/connector/rocketmq-connector/src/main/java/com/alibaba/otter/canal/connector/rocketmq/consumer/CanalRocketMQConsumer.java b/connector/rocketmq-connector/src/main/java/com/alibaba/otter/canal/connector/rocketmq/consumer/CanalRocketMQConsumer.java index e3f969cbf5..3637312567 100644 --- a/connector/rocketmq-connector/src/main/java/com/alibaba/otter/canal/connector/rocketmq/consumer/CanalRocketMQConsumer.java +++ b/connector/rocketmq-connector/src/main/java/com/alibaba/otter/canal/connector/rocketmq/consumer/CanalRocketMQConsumer.java @@ -21,7 +21,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson2.JSON; import com.alibaba.otter.canal.connector.core.config.CanalConstants; import com.alibaba.otter.canal.connector.core.consumer.CommonMessage; import com.alibaba.otter.canal.connector.core.spi.CanalMsgConsumer; diff --git a/connector/rocketmq-connector/src/main/java/com/alibaba/otter/canal/connector/rocketmq/producer/CanalRocketMQProducer.java b/connector/rocketmq-connector/src/main/java/com/alibaba/otter/canal/connector/rocketmq/producer/CanalRocketMQProducer.java index e08a3b9769..824304e996 100644 --- a/connector/rocketmq-connector/src/main/java/com/alibaba/otter/canal/connector/rocketmq/producer/CanalRocketMQProducer.java +++ 
b/connector/rocketmq-connector/src/main/java/com/alibaba/otter/canal/connector/rocketmq/producer/CanalRocketMQProducer.java @@ -1,7 +1,7 @@ package com.alibaba.otter.canal.connector.rocketmq.producer; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONWriter; import com.alibaba.otter.canal.common.CanalException; import com.alibaba.otter.canal.common.utils.ExecutorTemplate; import com.alibaba.otter.canal.common.utils.NamedThreadFactory; @@ -263,7 +263,7 @@ public void send(final MQDestination destination, String topicName, List messages = flatMessagePart.stream() .map(flatMessage -> new Message(topicName, ((RocketMQProducerConfig) this.mqProperties).getTag(), - JSON.toJSONBytes(flatMessage, SerializerFeature.WriteMapNullValue))) + JSON.toJSONBytes(flatMessage, JSONWriter.Feature.WriteNulls))) .collect(Collectors.toList()); // 批量发送 sendMessage(messages, index); @@ -278,7 +278,7 @@ public void send(final MQDestination destination, String topicName, List messages = flatMessages.stream() .map(flatMessage -> new Message(topicName, ((RocketMQProducerConfig) this.mqProperties).getTag(), - JSON.toJSONBytes(flatMessage, SerializerFeature.WriteMapNullValue))) + JSON.toJSONBytes(flatMessage, JSONWriter.Feature.WriteNulls))) .collect(Collectors.toList()); // 批量发送 sendMessage(messages, partition); diff --git a/connector/tcp-connector/pom.xml b/connector/tcp-connector/pom.xml index 9838a21ebb..d15e0143f2 100644 --- a/connector/tcp-connector/pom.xml +++ b/connector/tcp-connector/pom.xml @@ -3,7 +3,7 @@ canal.connector com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml diff --git a/connector/tcp-connector/src/main/java/com/alibaba/otter/canal/connector/tcp/consumer/CanalTCPConsumer.java b/connector/tcp-connector/src/main/java/com/alibaba/otter/canal/connector/tcp/consumer/CanalTCPConsumer.java index bc7966bfa5..a773363a4b 100644 --- 
a/connector/tcp-connector/src/main/java/com/alibaba/otter/canal/connector/tcp/consumer/CanalTCPConsumer.java +++ b/connector/tcp-connector/src/main/java/com/alibaba/otter/canal/connector/tcp/consumer/CanalTCPConsumer.java @@ -1,5 +1,13 @@ package com.alibaba.otter.canal.connector.tcp.consumer; +import java.net.InetSocketAddress; +import java.net.SocketAddress; +import java.util.List; +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.lang.StringUtils; + import com.alibaba.otter.canal.client.CanalConnector; import com.alibaba.otter.canal.client.impl.ClusterCanalConnector; import com.alibaba.otter.canal.client.impl.ClusterNodeAccessStrategy; @@ -12,14 +20,6 @@ import com.alibaba.otter.canal.connector.tcp.config.TCPConstants; import com.alibaba.otter.canal.protocol.Message; -import org.apache.commons.lang.StringUtils; - -import java.net.InetSocketAddress; -import java.net.SocketAddress; -import java.util.List; -import java.util.Properties; -import java.util.concurrent.TimeUnit; - /** * TCP 消费者连接器, 一个destination对应一个SPI实例 * @@ -99,7 +99,9 @@ public void ack() { @Override public void disconnect() { - canalConnector.unsubscribe(); + // tcp模式下,因为是单tcp消费,避免adapter异常断开时直接unsubscribe + // unsubscribe发送给canal-server会导致清理cursor位点,如果此时canal-server出现重启,就会丢失binlog数据 + // canalConnector.unsubscribe(); canalConnector.disconnect(); } } diff --git a/dbsync/pom.xml b/dbsync/pom.xml index 76421c383a..7a7adf2e7e 100644 --- a/dbsync/pom.xml +++ b/dbsync/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter @@ -33,6 +33,14 @@ org.slf4j slf4j-api + + org.apache.commons + commons-compress + + + com.github.luben + zstd-jni + junit diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/CharsetConversion.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/CharsetConversion.java index 725abe60e6..c81dcc6f4f 100644 --- 
a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/CharsetConversion.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/CharsetConversion.java @@ -21,6 +21,7 @@ static final class Entry { protected final String mysqlCharset; protected final String mysqlCollation; protected final String javaCharset; + protected final Charset charset; Entry(final int id, String mysqlCharset, // NL String mysqlCollation, String javaCharset){ @@ -28,6 +29,7 @@ static final class Entry { this.mysqlCharset = mysqlCharset; this.mysqlCollation = mysqlCollation; this.javaCharset = javaCharset; + this.charset = Charset.isSupported(javaCharset) ? Charset.forName(javaCharset) : null; } } @@ -390,6 +392,23 @@ public static String getJavaCharset(final int id) { } } + public static Charset getNioCharset(final int id) { + Entry entry = getEntry(id); + + if (entry != null) { + if (entry.charset != null) { + return entry.charset; + } else { + logger.warn("Unknown java charset for: id = " + id + ", name = " + entry.mysqlCharset + ", coll = " + + entry.mysqlCollation); + return null; + } + } else { + logger.warn("Unexpect mysql charset: " + id); + return null; + } + } + public static void main(String[] args) { for (int i = 0; i < entries.length; i++) { Entry entry = entries[i]; diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/JsonConversion.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/JsonConversion.java index f04daa1d51..bcfd26b688 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/JsonConversion.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/JsonConversion.java @@ -1,5 +1,7 @@ package com.taobao.tddl.dbsync.binlog; +import java.nio.charset.Charset; + import static com.taobao.tddl.dbsync.binlog.event.RowsLogBuffer.appendNumber2; import static com.taobao.tddl.dbsync.binlog.event.RowsLogBuffer.appendNumber4; import static com.taobao.tddl.dbsync.binlog.event.RowsLogBuffer.usecondsToStr; @@ -57,23 +59,27 @@ public class JsonConversion { 
public static final int VALUE_ENTRY_SIZE_LARGE = (1 + LARGE_OFFSET_SIZE); public static Json_Value parse_value(int type, LogBuffer buffer, long len, String charsetName) { + return parse_value(type, buffer, len, Charset.forName(charsetName)); + } + + public static Json_Value parse_value(int type, LogBuffer buffer, long len, Charset charset) { buffer = buffer.duplicate(buffer.position(), (int) len); switch (type) { case JSONB_TYPE_SMALL_OBJECT: - return parse_array_or_object(Json_enum_type.OBJECT, buffer, len, false, charsetName); + return parse_array_or_object(Json_enum_type.OBJECT, buffer, len, false, charset); case JSONB_TYPE_LARGE_OBJECT: - return parse_array_or_object(Json_enum_type.OBJECT, buffer, len, true, charsetName); + return parse_array_or_object(Json_enum_type.OBJECT, buffer, len, true, charset); case JSONB_TYPE_SMALL_ARRAY: - return parse_array_or_object(Json_enum_type.ARRAY, buffer, len, false, charsetName); + return parse_array_or_object(Json_enum_type.ARRAY, buffer, len, false, charset); case JSONB_TYPE_LARGE_ARRAY: - return parse_array_or_object(Json_enum_type.ARRAY, buffer, len, true, charsetName); + return parse_array_or_object(Json_enum_type.ARRAY, buffer, len, true, charset); default: - return parse_scalar(type, buffer, len, charsetName); + return parse_scalar(type, buffer, len, charset); } } private static Json_Value parse_array_or_object(Json_enum_type type, LogBuffer buffer, long len, boolean large, - String charsetName) { + Charset charset) { long offset_size = large ? LARGE_OFFSET_SIZE : SMALL_OFFSET_SIZE; if (len < 2 * offset_size) { throw new IllegalArgumentException("illegal json data"); @@ -100,7 +106,7 @@ private static long read_offset_or_size(LogBuffer buffer, boolean large) { return large ? 
buffer.getUint32() : buffer.getUint16(); } - private static Json_Value parse_scalar(int type, LogBuffer buffer, long len, String charsetName) { + private static Json_Value parse_scalar(int type, LogBuffer buffer, long len, Charset charset) { switch (type) { case JSONB_TYPE_LITERAL: /* purecov: inspected */ @@ -156,7 +162,7 @@ private static Json_Value parse_scalar(int type, LogBuffer buffer, long len, Str } return new Json_Value(Json_enum_type.STRING, buffer.rewind() .forward((int) n) - .getFixString((int) str_len, charsetName)); + .getFixString((int) str_len, charset)); case JSONB_TYPE_OPAQUE: /* * There should always be at least one byte, which tells the @@ -242,7 +248,7 @@ public Json_Value(Json_enum_type t, LogBuffer data, long element_count, long byt this.m_large = large; } - public String key(int i, String charsetName) { + public String key(int i, Charset charset) { m_data.rewind(); int offset_size = m_large ? LARGE_OFFSET_SIZE : SMALL_OFFSET_SIZE; int key_entry_size = m_large ? KEY_ENTRY_SIZE_LARGE : KEY_ENTRY_SIZE_SMALL; @@ -255,10 +261,10 @@ public String key(int i, String charsetName) { // entry, always two // bytes. long key_length = m_data.getUint16(); - return m_data.rewind().forward((int) key_offset).getFixString((int) key_length, charsetName); + return m_data.rewind().forward((int) key_offset).getFixString((int) key_length, charset); } - public Json_Value element(int i, String charsetName) { + public Json_Value element(int i, Charset charset) { m_data.rewind(); int offset_size = m_large ? LARGE_OFFSET_SIZE : SMALL_OFFSET_SIZE; int key_entry_size = m_large ? 
KEY_ENTRY_SIZE_LARGE : KEY_ENTRY_SIZE_SMALL; @@ -271,13 +277,13 @@ public Json_Value element(int i, String charsetName) { int type = m_data.forward(entry_offset).getUint8(); if (type == JSONB_TYPE_INT16 || type == JSONB_TYPE_UINT16 || type == JSONB_TYPE_LITERAL || (m_large && (type == JSONB_TYPE_INT32 || type == JSONB_TYPE_UINT32))) { - return parse_scalar(type, m_data, value_entry_size - 1, charsetName); + return parse_scalar(type, m_data, value_entry_size - 1, charset); } int value_offset = (int) read_offset_or_size(m_data, m_large); - return parse_value(type, m_data.rewind().forward(value_offset), (int) m_length - value_offset, charsetName); + return parse_value(type, m_data.rewind().forward(value_offset), (int) m_length - value_offset, charset); } - public StringBuilder toJsonString(StringBuilder buf, String charsetName) { + public StringBuilder toJsonString(StringBuilder buf, Charset charset) { switch (m_type) { case OBJECT: buf.append("{"); @@ -285,9 +291,9 @@ public StringBuilder toJsonString(StringBuilder buf, String charsetName) { if (i > 0) { buf.append(", "); } - buf.append('"').append(key(i, charsetName)).append('"'); + buf.append('"').append(key(i, charset)).append('"'); buf.append(": "); - element(i, charsetName).toJsonString(buf, charsetName); + element(i, charset).toJsonString(buf, charset); } buf.append("}"); break; @@ -297,7 +303,7 @@ public StringBuilder toJsonString(StringBuilder buf, String charsetName) { if (i > 0) { buf.append(", "); } - element(i, charsetName).toJsonString(buf, charsetName); + element(i, charset).toJsonString(buf, charset); } buf.append("]"); break; @@ -398,7 +404,7 @@ public StringBuilder toJsonString(StringBuilder buf, String charsetName) { } buf.append('"').append(text).append('"'); } else { - text = m_data.getFixString((int) m_length, charsetName); + text = m_data.getFixString((int) m_length, charset); buf.append('"').append(escapse(text)).append('"'); } diff --git 
a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/JsonDiffConversion.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/JsonDiffConversion.java index 878a075174..eb8f6ddec4 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/JsonDiffConversion.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/JsonDiffConversion.java @@ -1,5 +1,6 @@ package com.taobao.tddl.dbsync.binlog; +import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; @@ -36,6 +37,11 @@ public class JsonDiffConversion { public static StringBuilder print_json_diff(LogBuffer buffer, long len, String columnName, int columnIndex, String charsetName) { + return print_json_diff(buffer, len, columnName, columnIndex, Charset.forName(charsetName)); + } + + public static StringBuilder print_json_diff(LogBuffer buffer, long len, String columnName, int columnIndex, + Charset charset) { int position = buffer.position(); List operation_names = new ArrayList<>(); while (buffer.hasRemaining()) { @@ -111,14 +117,14 @@ public static StringBuilder print_json_diff(LogBuffer buffer, long len, String c Json_Value jsonValue = JsonConversion.parse_value(buffer.getUint8(), buffer, value_length - 1, - charsetName); + charset); buffer.forward((int) value_length - 1); // Read value if (jsonValue.m_type == Json_enum_type.ERROR) { throw new IllegalArgumentException("parsing json value"); } StringBuilder jsonBuilder = new StringBuilder(); - jsonValue.toJsonString(jsonBuilder, charsetName); + jsonValue.toJsonString(jsonBuilder, charset); builder.append(jsonBuilder); } diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogBuffer.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogBuffer.java index 408eaa4ebd..b6e1457d84 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogBuffer.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogBuffer.java @@ -1,13 +1,18 @@ package com.taobao.tddl.dbsync.binlog; +import 
java.io.ByteArrayInputStream; import java.io.IOException; import java.io.OutputStream; -import java.io.UnsupportedEncodingException; import java.math.BigDecimal; import java.math.BigInteger; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.BitSet; +import org.apache.commons.compress.compressors.deflate.DeflateCompressorInputStream; +import org.apache.commons.io.IOUtils; + /** * TODO: Document Me!! * @@ -16,6 +21,8 @@ */ public class LogBuffer { + static final BigDecimal DECIMAL_ZERO_1_SCALE = BigDecimal.valueOf(0, 1); + static final BigDecimal DECIMAL_ONE_1_SCALE = BigDecimal.valueOf(10, 1); protected byte[] buffer; protected int origin, limit; @@ -26,7 +33,9 @@ protected LogBuffer(){ } public LogBuffer(byte[] buffer, final int origin, final int limit){ - if (origin + limit > buffer.length) throw new IllegalArgumentException("capacity excceed: " + (origin + limit)); + if (origin + limit > buffer.length) { + throw new IllegalArgumentException("capacity excceed: " + (origin + limit)); + } this.buffer = buffer; this.origin = origin; @@ -38,7 +47,9 @@ public LogBuffer(byte[] buffer, final int origin, final int limit){ * Return n bytes in this buffer. */ public final LogBuffer duplicate(final int pos, final int len) { - if (pos + len > limit) throw new IllegalArgumentException("limit excceed: " + (pos + len)); + if (pos + len > limit) { + throw new IllegalArgumentException("limit excceed: " + (pos + len)); + } // XXX: Do momery copy avoid buffer modified. final int off = origin + pos; @@ -50,8 +61,9 @@ public final LogBuffer duplicate(final int pos, final int len) { * Return next n bytes in this buffer. 
*/ public final LogBuffer duplicate(final int len) { - if (position + len > origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position + len - origin)); + if (position + len > origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position + len - origin)); + } // XXX: Do momery copy avoid buffer modified. final int end = position + len; @@ -99,7 +111,9 @@ public final int position() { * newPosition do not hold */ public final LogBuffer position(final int newPosition) { - if (newPosition > limit || newPosition < 0) throw new IllegalArgumentException("limit excceed: " + newPosition); + if (newPosition > limit || newPosition < 0) { + throw new IllegalArgumentException("limit excceed: " + newPosition); + } this.position = origin + newPosition; return this; @@ -112,8 +126,9 @@ public final LogBuffer position(final int newPosition) { * @return This buffer */ public final LogBuffer forward(final int len) { - if (position + len > origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position + len - origin)); + if (position + len > origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position + len - origin)); + } this.position += len; return this; @@ -205,7 +220,9 @@ public final boolean hasRemaining() { * Return 8-bit signed int from buffer. */ public final int getInt8(final int pos) { - if (pos >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " + pos); + if (pos >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + pos); + } return buffer[origin + pos]; } @@ -214,7 +231,9 @@ public final int getInt8(final int pos) { * Return next 8-bit signed int from buffer. 
*/ public final int getInt8() { - if (position >= origin + limit) throw new IllegalArgumentException("limit excceed: " + (position - origin)); + if (position >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin)); + } return buffer[position++]; } @@ -223,7 +242,9 @@ public final int getInt8() { * Return 8-bit unsigned int from buffer. */ public final int getUint8(final int pos) { - if (pos >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " + pos); + if (pos >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + pos); + } return 0xff & buffer[origin + pos]; } @@ -232,7 +253,9 @@ public final int getUint8(final int pos) { * Return next 8-bit unsigned int from buffer. */ public final int getUint8() { - if (position >= origin + limit) throw new IllegalArgumentException("limit excceed: " + (position - origin)); + if (position >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin)); + } return 0xff & buffer[position++]; } @@ -245,8 +268,9 @@ public final int getUint8() { public final int getInt16(final int pos) { final int position = origin + pos; - if (pos + 1 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 1))); + if (pos + 1 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? 
pos : (pos + 1))); + } byte[] buf = buffer; return (0xff & buf[position]) | ((buf[position + 1]) << 8); @@ -258,8 +282,9 @@ public final int getInt16(final int pos) { * @see mysql-5.1.60/include/my_global.h - sint2korr */ public final int getInt16() { - if (position + 1 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 1)); + if (position + 1 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 1)); + } byte[] buf = buffer; return (0xff & buf[position++]) | ((buf[position++]) << 8); @@ -273,8 +298,9 @@ public final int getInt16() { public final int getUint16(final int pos) { final int position = origin + pos; - if (pos + 1 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 1))); + if (pos + 1 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 1))); + } byte[] buf = buffer; return (0xff & buf[position]) | ((0xff & buf[position + 1]) << 8); @@ -286,8 +312,9 @@ public final int getUint16(final int pos) { * @see mysql-5.1.60/include/my_global.h - uint2korr */ public final int getUint16() { - if (position + 1 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 1)); + if (position + 1 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 1)); + } byte[] buf = buffer; return (0xff & buf[position++]) | ((0xff & buf[position++]) << 8); @@ -300,9 +327,9 @@ public final int getUint16() { */ public final int getBeInt16(final int pos) { final int position = origin + pos; - - if (pos + 1 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 1))); + if (pos + 1 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? 
pos : (pos + 1))); + } byte[] buf = buffer; return (0xff & buf[position + 1]) | ((buf[position]) << 8); @@ -314,8 +341,9 @@ public final int getBeInt16(final int pos) { * @see mysql-5.1.60/include/my_global.h - mi_sint2korr */ public final int getBeInt16() { - if (position + 1 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 1)); + if (position + 1 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 1)); + } byte[] buf = buffer; return (buf[position++] << 8) | (0xff & buf[position++]); @@ -328,9 +356,9 @@ public final int getBeInt16() { */ public final int getBeUint16(final int pos) { final int position = origin + pos; - - if (pos + 1 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 1))); + if (pos + 1 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 1))); + } byte[] buf = buffer; return (0xff & buf[position + 1]) | ((0xff & buf[position]) << 8); @@ -342,8 +370,9 @@ public final int getBeUint16(final int pos) { * @see mysql-5.6.10/include/myisampack.h - mi_usint2korr */ public final int getBeUint16() { - if (position + 1 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 1)); + if (position + 1 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 1)); + } byte[] buf = buffer; return ((0xff & buf[position++]) << 8) | (0xff & buf[position++]); @@ -357,8 +386,9 @@ public final int getBeUint16() { public final int getInt24(final int pos) { final int position = origin + pos; - if (pos + 2 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 2))); + if (pos + 2 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? 
pos : (pos + 2))); + } byte[] buf = buffer; return (0xff & buf[position]) | ((0xff & buf[position + 1]) << 8) | ((buf[position + 2]) << 16); @@ -370,8 +400,9 @@ public final int getInt24(final int pos) { * @see mysql-5.1.60/include/my_global.h - sint3korr */ public final int getInt24() { - if (position + 2 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 2)); + if (position + 2 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 2)); + } byte[] buf = buffer; return (0xff & buf[position++]) | ((0xff & buf[position++]) << 8) | ((buf[position++]) << 16); @@ -385,8 +416,9 @@ public final int getInt24() { public final int getBeInt24(final int pos) { final int position = origin + pos; - if (pos + 2 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 2))); + if (pos + 2 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 2))); + } byte[] buf = buffer; return (0xff & buf[position + 2]) | ((0xff & buf[position + 1]) << 8) | ((buf[position]) << 16); @@ -398,8 +430,9 @@ public final int getBeInt24(final int pos) { * @see mysql-5.6.10/include/myisampack.h - mi_usint3korr */ public final int getBeInt24() { - if (position + 2 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 2)); + if (position + 2 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 2)); + } byte[] buf = buffer; return ((buf[position++]) << 16) | ((0xff & buf[position++]) << 8) | (0xff & buf[position++]); @@ -413,8 +446,9 @@ public final int getBeInt24() { public final int getUint24(final int pos) { final int position = origin + pos; - if (pos + 2 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? 
pos : (pos + 2))); + if (pos + 2 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 2))); + } byte[] buf = buffer; return (0xff & buf[position]) | ((0xff & buf[position + 1]) << 8) | ((0xff & buf[position + 2]) << 16); @@ -426,8 +460,9 @@ public final int getUint24(final int pos) { * @see mysql-5.1.60/include/my_global.h - uint3korr */ public final int getUint24() { - if (position + 2 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 2)); + if (position + 2 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 2)); + } byte[] buf = buffer; return (0xff & buf[position++]) | ((0xff & buf[position++]) << 8) | ((0xff & buf[position++]) << 16); @@ -441,8 +476,9 @@ public final int getUint24() { public final int getBeUint24(final int pos) { final int position = origin + pos; - if (pos + 2 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 2))); + if (pos + 2 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? 
pos : (pos + 2))); + } byte[] buf = buffer; return (0xff & buf[position + 2]) | ((0xff & buf[position + 1]) << 8) | ((0xff & buf[position]) << 16); @@ -454,8 +490,9 @@ public final int getBeUint24(final int pos) { * @see mysql-5.6.10/include/myisampack.h - mi_usint3korr */ public final int getBeUint24() { - if (position + 2 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 2)); + if (position + 2 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 2)); + } byte[] buf = buffer; return ((0xff & buf[position++]) << 16) | ((0xff & buf[position++]) << 8) | (0xff & buf[position++]); @@ -469,8 +506,9 @@ public final int getBeUint24() { public final int getInt32(final int pos) { final int position = origin + pos; - if (pos + 3 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 3))); + if (pos + 3 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 3))); + } byte[] buf = buffer; return (0xff & buf[position]) | ((0xff & buf[position + 1]) << 8) | ((0xff & buf[position + 2]) << 16) @@ -485,8 +523,9 @@ public final int getInt32(final int pos) { public final int getBeInt32(final int pos) { final int position = origin + pos; - if (pos + 3 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 3))); + if (pos + 3 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? 
pos : (pos + 3))); + } byte[] buf = buffer; return (0xff & buf[position + 3]) | ((0xff & buf[position + 2]) << 8) | ((0xff & buf[position + 1]) << 16) @@ -499,8 +538,9 @@ public final int getBeInt32(final int pos) { * @see mysql-5.1.60/include/my_global.h - sint4korr */ public final int getInt32() { - if (position + 3 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 3)); + if (position + 3 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 3)); + } byte[] buf = buffer; return (0xff & buf[position++]) | ((0xff & buf[position++]) << 8) | ((0xff & buf[position++]) << 16) @@ -513,8 +553,9 @@ public final int getInt32() { * @see mysql-5.6.10/include/myisampack.h - mi_sint4korr */ public final int getBeInt32() { - if (position + 3 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 3)); + if (position + 3 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 3)); + } byte[] buf = buffer; return ((buf[position++]) << 24) | ((0xff & buf[position++]) << 16) | ((0xff & buf[position++]) << 8) @@ -529,8 +570,9 @@ public final int getBeInt32() { public final long getUint32(final int pos) { final int position = origin + pos; - if (pos + 3 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 3))); + if (pos + 3 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 3))); + } byte[] buf = buffer; return ((long) (0xff & buf[position])) | ((long) (0xff & buf[position + 1]) << 8) @@ -545,8 +587,9 @@ public final long getUint32(final int pos) { public final long getBeUint32(final int pos) { final int position = origin + pos; - if (pos + 3 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? 
pos : (pos + 3))); + if (pos + 3 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 3))); + } byte[] buf = buffer; return ((long) (0xff & buf[position + 3])) | ((long) (0xff & buf[position + 2]) << 8) @@ -559,8 +602,9 @@ public final long getBeUint32(final int pos) { * @see mysql-5.1.60/include/my_global.h - uint4korr */ public final long getUint32() { - if (position + 3 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 3)); + if (position + 3 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 3)); + } byte[] buf = buffer; return ((long) (0xff & buf[position++])) | ((long) (0xff & buf[position++]) << 8) @@ -573,8 +617,9 @@ public final long getUint32() { * @see mysql-5.6.10/include/myisampack.h - mi_uint4korr */ public final long getBeUint32() { - if (position + 3 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 3)); + if (position + 3 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 3)); + } byte[] buf = buffer; return ((long) (0xff & buf[position++]) << 24) | ((long) (0xff & buf[position++]) << 16) @@ -587,8 +632,9 @@ public final long getBeUint32() { public final long getUlong40(final int pos) { final int position = origin + pos; - if (pos + 4 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 4))); + if (pos + 4 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 4))); + } byte[] buf = buffer; return ((long) (0xff & buf[position])) | ((long) (0xff & buf[position + 1]) << 8) @@ -600,8 +646,9 @@ public final long getUlong40(final int pos) { * Return next 40-bit unsigned int from buffer. 
(little-endian) */ public final long getUlong40() { - if (position + 4 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 4)); + if (position + 4 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 4)); + } byte[] buf = buffer; return ((long) (0xff & buf[position++])) | ((long) (0xff & buf[position++]) << 8) @@ -617,8 +664,9 @@ public final long getUlong40() { public final long getBeUlong40(final int pos) { final int position = origin + pos; - if (pos + 4 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 4))); + if (pos + 4 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 4))); + } byte[] buf = buffer; return ((long) (0xff & buf[position + 4])) | ((long) (0xff & buf[position + 3]) << 8) @@ -632,8 +680,9 @@ public final long getBeUlong40(final int pos) { * @see mysql-5.6.10/include/myisampack.h - mi_uint5korr */ public final long getBeUlong40() { - if (position + 4 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 4)); + if (position + 4 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 4)); + } byte[] buf = buffer; return ((long) (0xff & buf[position++]) << 32) | ((long) (0xff & buf[position++]) << 24) @@ -649,8 +698,9 @@ public final long getBeUlong40() { public final long getLong48(final int pos) { final int position = origin + pos; - if (pos + 5 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 5))); + if (pos + 5 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? 
pos : (pos + 5))); + } byte[] buf = buffer; return ((long) (0xff & buf[position])) | ((long) (0xff & buf[position + 1]) << 8) @@ -666,8 +716,9 @@ public final long getLong48(final int pos) { public final long getBeLong48(final int pos) { final int position = origin + pos; - if (pos + 5 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 5))); + if (pos + 5 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 5))); + } byte[] buf = buffer; return ((long) (0xff & buf[position + 5])) | ((long) (0xff & buf[position + 4]) << 8) @@ -681,8 +732,9 @@ public final long getBeLong48(final int pos) { * @see mysql-5.1.60/include/my_global.h - sint6korr */ public final long getLong48() { - if (position + 5 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 5)); + if (position + 5 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 5)); + } byte[] buf = buffer; return ((long) (0xff & buf[position++])) | ((long) (0xff & buf[position++]) << 8) @@ -696,8 +748,9 @@ public final long getLong48() { * @see mysql-5.6.10/include/myisampack.h - mi_sint6korr */ public final long getBeLong48() { - if (position + 5 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 5)); + if (position + 5 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 5)); + } byte[] buf = buffer; return ((long) (buf[position++]) << 40) | ((long) (0xff & buf[position++]) << 32) @@ -713,8 +766,9 @@ public final long getBeLong48() { public final long getUlong48(final int pos) { final int position = origin + pos; - if (pos + 5 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 5))); + if (pos + 5 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? 
pos : (pos + 5))); + } byte[] buf = buffer; return ((long) (0xff & buf[position])) | ((long) (0xff & buf[position + 1]) << 8) @@ -730,8 +784,9 @@ public final long getUlong48(final int pos) { public final long getBeUlong48(final int pos) { final int position = origin + pos; - if (pos + 5 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 5))); + if (pos + 5 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 5))); + } byte[] buf = buffer; return ((long) (0xff & buf[position + 5])) | ((long) (0xff & buf[position + 4]) << 8) @@ -745,8 +800,9 @@ public final long getBeUlong48(final int pos) { * @see mysql-5.1.60/include/my_global.h - uint6korr */ public final long getUlong48() { - if (position + 5 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 5)); + if (position + 5 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 5)); + } byte[] buf = buffer; return ((long) (0xff & buf[position++])) | ((long) (0xff & buf[position++]) << 8) @@ -760,8 +816,9 @@ public final long getUlong48() { * @see mysql-5.6.10/include/myisampack.h - mi_uint6korr */ public final long getBeUlong48() { - if (position + 5 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 5)); + if (position + 5 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 5)); + } byte[] buf = buffer; return ((long) (0xff & buf[position++]) << 40) | ((long) (0xff & buf[position++]) << 32) @@ -775,8 +832,9 @@ public final long getBeUlong48() { public final long getUlong56(final int pos) { final int position = origin + pos; - if (pos + 6 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? 
pos : (pos + 6))); + if (pos + 6 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 6))); + } byte[] buf = buffer; return ((long) (0xff & buf[position])) | ((long) (0xff & buf[position + 1]) << 8) @@ -789,8 +847,9 @@ public final long getUlong56(final int pos) { * Return next 56-bit unsigned int from buffer. (little-endian) */ public final long getUlong56() { - if (position + 6 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 6)); + if (position + 6 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 6)); + } byte[] buf = buffer; return ((long) (0xff & buf[position++])) | ((long) (0xff & buf[position++]) << 8) @@ -805,8 +864,9 @@ public final long getUlong56() { public final long getBeUlong56(final int pos) { final int position = origin + pos; - if (pos + 6 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 6))); + if (pos + 6 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 6))); + } byte[] buf = buffer; return ((long) (0xff & buf[position + 6])) | ((long) (0xff & buf[position + 5]) << 8) @@ -819,8 +879,9 @@ public final long getBeUlong56(final int pos) { * Return next 56-bit unsigned int from buffer. 
(big-endian) */ public final long getBeUlong56() { - if (position + 6 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 6)); + if (position + 6 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 6)); + } byte[] buf = buffer; return ((long) (0xff & buf[position++]) << 48) | ((long) (0xff & buf[position++]) << 40) @@ -837,8 +898,9 @@ public final long getBeUlong56() { public final long getLong64(final int pos) { final int position = origin + pos; - if (pos + 7 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 7))); + if (pos + 7 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + 7))); + } byte[] buf = buffer; return ((long) (0xff & buf[position])) | ((long) (0xff & buf[position + 1]) << 8) @@ -855,8 +917,9 @@ public final long getLong64(final int pos) { public final long getBeLong64(final int pos) { final int position = origin + pos; - if (pos + 7 >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + 7))); + if (pos + 7 >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? 
pos : (pos + 7))); + } byte[] buf = buffer; return ((long) (0xff & buf[position + 7])) | ((long) (0xff & buf[position + 6]) << 8) @@ -871,8 +934,9 @@ public final long getBeLong64(final int pos) { * @see mysql-5.1.60/include/my_global.h - sint8korr */ public final long getLong64() { - if (position + 7 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 7)); + if (position + 7 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 7)); + } byte[] buf = buffer; return ((long) (0xff & buf[position++])) | ((long) (0xff & buf[position++]) << 8) @@ -887,8 +951,9 @@ public final long getLong64() { * @see mysql-5.6.10/include/myisampack.h - mi_sint8korr */ public final long getBeLong64() { - if (position + 7 >= origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - origin + 7)); + if (position + 7 >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position - origin + 7)); + } byte[] buf = buffer; return ((long) (buf[position++]) << 56) | ((long) (0xff & buf[position++]) << 48) @@ -1038,29 +1103,27 @@ public final long getPackedLong() { } } - /* default ANSI charset */ - public static final String ISO_8859_1 = "ISO-8859-1"; - /** * Return fix length string from buffer. */ public final String getFixString(final int pos, final int len) { - return getFixString(pos, len, ISO_8859_1); + return getFixString(pos, len, StandardCharsets.ISO_8859_1); } /** * Return next fix length string from buffer. */ public final String getFixString(final int len) { - return getFixString(len, ISO_8859_1); + return getFixString(len, StandardCharsets.ISO_8859_1); } /** * Return fix length string from buffer. */ - public final String getFixString(final int pos, final int len, String charsetName) { - if (pos + len > limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? 
pos : (pos + len))); + public final String getFixString(final int pos, final int len, Charset charset) { + if (pos + len > limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + len))); + } final int from = origin + pos; final int end = from + len; @@ -1069,19 +1132,16 @@ public final String getFixString(final int pos, final int len, String charsetNam for (; (found < end) && buf[found] != '\0'; found++) /* empty loop */; - try { - return new String(buf, from, found - from, charsetName); - } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException("Unsupported encoding: " + charsetName, e); - } + return new String(buf, from, found - from, charset); } /** * Return next fix length string from buffer. */ - public final String getFixString(final int len, String charsetName) { - if (position + len > origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position + len - origin)); + public final String getFixString(final int len, Charset charset) { + if (position + len > origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position + len - origin)); + } final int from = position; final int end = from + len; @@ -1090,97 +1150,141 @@ public final String getFixString(final int len, String charsetName) { for (; (found < end) && buf[found] != '\0'; found++) /* empty loop */; - try { - String string = new String(buf, from, found - from, charsetName); - position += len; - return string; - } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException("Unsupported encoding: " + charsetName, e); - } + String string = new String(buf, from, found - from, charset); + position += len; + return string; } - /** - * Return fix-length string from buffer without null-terminate checking. 
Fix - * bug #17 {@link https://github.com/AlibabaTech/canal/issues/17 } - */ - public final String getFullString(final int pos, final int len, String charsetName) { - if (pos + len > limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos < 0 ? pos : (pos + len))); + public final String getFixName(final int len, Charset charset) { + if (position + len > origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position + len - origin)); + } - try { - return new String(buffer, origin + pos, len, charsetName); - } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException("Unsupported encoding: " + charsetName, e); + final int from = position; + final int end = from + len; + byte[] buf = buffer; + int found = from; + for (; (found < end) && buf[found] != '\0'; found++) + /* empty loop */; + + int length = found - from; + String string = null; + if (length <= 16) { + string = NameCache.name(buf, from, length, charset); } + if (string == null) { + string = new String(buf, from, length, charset); + } + position += len; + return string; } - /** - * Return next fix-length string from buffer without null-terminate - * checking. 
Fix bug #17 {@link https - * ://github.com/AlibabaTech/canal/issues/17 } - */ - public final String getFullString(final int len, String charsetName) { - if (position + len > origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position + len - origin)); + public final String getFixName(final int len) { + if (position + len > origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position + len - origin)); + } - try { - String string = new String(buffer, position, len, charsetName); - position += len; - return string; - } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException("Unsupported encoding: " + charsetName, e); + final int from = position; + final int end = from + len; + byte[] buf = buffer; + int found = from; + for (; (found < end) && buf[found] != '\0'; found++) + /* empty loop */; + + int length = found - from; + String string = null; + if (length <= 16) { + string = NameCache.name(buf, from, length, StandardCharsets.ISO_8859_1); + } + if (string == null) { + string = new String(buf, from, length, StandardCharsets.ISO_8859_1); } + position += len; + return string; } /** - * Return dynamic length string from buffer. + * Return fix-length string from buffer without null-terminate checking. + * Fix bug #17 {@link https://github.com/AlibabaTech/canal/issues/17 } */ - public final String getString(final int pos) { - return getString(pos, ISO_8859_1); + public final String getFullString(final int pos, final int len, Charset charset) { + if (pos + len > limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos < 0 ? pos : (pos + len))); + } + + return new String(buffer, origin + pos, len, charset); } /** - * Return next dynamic length string from buffer. + * Return next fix-length string from buffer without null-terminate + * checking. 
+ * Fix bug #17 {@link https://github.com/AlibabaTech/canal/issues/17 } */ - public final String getString() { - return getString(ISO_8859_1); + public final String getFullString(final int len, Charset charset) { + if (position + len > origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position + len - origin)); + } + + String string = new String(buffer, position, len, charset); + position += len; + return string; } + /** * Return dynamic length string from buffer. */ - public final String getString(final int pos, String charsetName) { - if (pos >= limit || pos < 0) throw new IllegalArgumentException("limit excceed: " + pos); + public final String getString(final int pos) { + if (pos >= limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + pos); + } byte[] buf = buffer; final int len = (0xff & buf[origin + pos]); - if (pos + len + 1 > limit) throw new IllegalArgumentException("limit excceed: " + (pos + len + 1)); - - try { - return new String(buf, origin + pos + 1, len, charsetName); - } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException("Unsupported encoding: " + charsetName, e); + if (pos + len + 1 > limit) { + throw new IllegalArgumentException("limit excceed: " + (pos + len + 1)); } + + return new String(buf, origin + pos + 1, len, StandardCharsets.ISO_8859_1); } /** * Return next dynamic length string from buffer. 
*/ - public final String getString(String charsetName) { - if (position >= origin + limit) throw new IllegalArgumentException("limit excceed: " + position); + public final String getString() { + if (position >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + position); + } byte[] buf = buffer; final int len = (0xff & buf[position]); - if (position + len + 1 > origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position + len + 1 - origin)); + if (position + len + 1 > origin + limit) { + throw new IllegalArgumentException("limit excceed: " + + (position + len + 1 - origin)); + } - try { - String string = new String(buf, position + 1, len, charsetName); - position += len + 1; - return string; - } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException("Unsupported encoding: " + charsetName, e); + String string = new String(buf, position + 1, len, StandardCharsets.ISO_8859_1); + position += len + 1; + return string; + } + + public final String getName() { + if (position >= origin + limit) { + throw new IllegalArgumentException("limit excceed: " + position); + } + + byte[] buf = buffer; + final int len = (0xff & buf[position]); + if (position + len + 1 > origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position + len + 1 - origin)); + } + + String string = NameCache.name(buf, position + 1, len, StandardCharsets.ISO_8859_1); + if (string == null) { + string = new String(buf, position + 1, len, StandardCharsets.ISO_8859_1); } + position += len + 1; + return string; } /** @@ -1445,8 +1549,9 @@ private final BigDecimal getDecimal0(final int begin, final int intg, final int } d_copy[begin] ^= 0x80; /* restore sign */ - String decimal = String.valueOf(buf, 0, pos); - return new BigDecimal(decimal); + // String decimal = String.valueOf(buf, 0, pos); + // return new BigDecimal(decimal); + return new BigDecimal(buf, 0, pos); } /** @@ -1455,8 +1560,9 @@ private final BigDecimal 
getDecimal0(final int begin, final int intg, final int * @param len The length of MY_BITMAP in bits. */ public final void fillBitmap(BitSet bitmap, final int pos, final int len) { - if (pos + ((len + 7) / 8) > limit || pos < 0) throw new IllegalArgumentException("limit excceed: " - + (pos + (len + 7) / 8)); + if (pos + ((len + 7) / 8) > limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos + (len + 7) / 8)); + } fillBitmap0(bitmap, origin + pos, len); } @@ -1467,9 +1573,9 @@ public final void fillBitmap(BitSet bitmap, final int pos, final int len) { * @param len The length of MY_BITMAP in bits. */ public final void fillBitmap(BitSet bitmap, final int len) { - if (position + ((len + 7) / 8) > origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position - + ((len + 7) / 8) - origin)); + if (position + ((len + 7) / 8) > origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position + ((len + 7) / 8) - origin)); + } position = fillBitmap0(bitmap, position, len); } @@ -1545,7 +1651,9 @@ public final void fillOutput(OutputStream out, final int len) throws IOException * Fill n bytes in this buffer. */ public final void fillBytes(final int pos, byte[] dest, final int destPos, final int len) { - if (pos + len > limit || pos < 0) throw new IllegalArgumentException("limit excceed: " + (pos + len)); + if (pos + len > limit || pos < 0) { + throw new IllegalArgumentException("limit excceed: " + (pos + len)); + } System.arraycopy(buffer, origin + pos, dest, destPos, len); } @@ -1554,8 +1662,9 @@ public final void fillBytes(final int pos, byte[] dest, final int destPos, final * Fill next n bytes in this buffer. 
*/ public final void fillBytes(byte[] dest, final int destPos, final int len) { - if (position + len > origin + limit) throw new IllegalArgumentException("limit excceed: " - + (position + len - origin)); + if (position + len > origin + limit) { + throw new IllegalArgumentException("limit excceed: " + (position + len - origin)); + } System.arraycopy(buffer, position, dest, destPos, len); position += len; @@ -1586,6 +1695,76 @@ public final byte[] getData() { return getData(0, limit); } + /** + * mariadb compress log event Get the length of uncompress content. + * + * @return 0 means error. + */ + public final long getUncompressLong(int lenPad) { + long len = 0; + switch (lenPad) { + case 1: + len = getInt8(); + break; + case 2: + len = getBeUint16(); + break; + case 3: + len = getBeUint24(); + break; + case 4: + len = getBeUint32(); + break; + default: + len = 0; + break; + } + + return len; + } + + /** + * uncompress mariadb log event + * + * @return + */ + public LogBuffer uncompressBuf() { + int lenPad = getInt8(); + long len = getUncompressLong(lenPad & 0x07); + int alg = (lenPad & 0x70) >> 4; + LogBuffer buffer = null; + try { + switch (alg) { + case 0: + buffer = uncompressZlib(limit - position); + break; + default: + // bad algorithm + return this; + } + } catch (Exception e) { + throw new IllegalArgumentException("uncompress failed ", e); + } + + if (buffer.limit() != len) { + throw new IllegalArgumentException( + "uncompress lenght not match, expected : " + len + " , but actual : " + buffer.limit()); + } + return buffer; + } + + private LogBuffer uncompressZlib(int len) throws Exception { + if (position + len > limit || position < 0) { + throw new IllegalArgumentException("limit excceed: " + (position + len)); + } + + try (DeflateCompressorInputStream in = new DeflateCompressorInputStream( + new ByteArrayInputStream(buffer, position, position + len))) { + byte[] decodeBytes = IOUtils.toByteArray(in); + return new LogBuffer(decodeBytes, 0, 
decodeBytes.length); + } + } + /** * Return full hexdump from position. */ @@ -1631,4 +1810,5 @@ public final String hexdump(final int pos, final int len) { } return ""; } + } diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogContext.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogContext.java index 7234b50b51..0f4f55f932 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogContext.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogContext.java @@ -26,6 +26,8 @@ public final class LogContext { private LogEvent gtidLogEvent; // save current gtid log event + private boolean iterateDecode = false; + public LogContext(){ this.formatDescription = FormatDescriptionLogEvent.FORMAT_DESCRIPTION_EVENT_5_x; } @@ -82,4 +84,12 @@ public LogEvent getGtidLogEvent() { public void setGtidLogEvent(LogEvent gtidLogEvent) { this.gtidLogEvent = gtidLogEvent; } + + public boolean isIterateDecode() { + return iterateDecode; + } + + public void setIterateDecode(boolean iterateDecode) { + this.iterateDecode = iterateDecode; + } } diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogDecoder.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogDecoder.java index 989a34e032..61213ccce8 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogDecoder.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogDecoder.java @@ -1,49 +1,19 @@ package com.taobao.tddl.dbsync.binlog; +import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.BitSet; +import java.util.List; +import org.apache.commons.compress.compressors.zstandard.ZstdCompressorInputStream; +import org.apache.commons.compress.utils.Lists; +import org.apache.commons.io.IOUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.alibaba.otter.canal.parse.driver.mysql.packets.GTIDSet; -import com.taobao.tddl.dbsync.binlog.event.AppendBlockLogEvent; -import 
com.taobao.tddl.dbsync.binlog.event.BeginLoadQueryLogEvent; -import com.taobao.tddl.dbsync.binlog.event.CreateFileLogEvent; -import com.taobao.tddl.dbsync.binlog.event.DeleteFileLogEvent; -import com.taobao.tddl.dbsync.binlog.event.DeleteRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.ExecuteLoadLogEvent; -import com.taobao.tddl.dbsync.binlog.event.ExecuteLoadQueryLogEvent; -import com.taobao.tddl.dbsync.binlog.event.FormatDescriptionLogEvent; -import com.taobao.tddl.dbsync.binlog.event.GtidLogEvent; -import com.taobao.tddl.dbsync.binlog.event.HeartbeatLogEvent; -import com.taobao.tddl.dbsync.binlog.event.IgnorableLogEvent; -import com.taobao.tddl.dbsync.binlog.event.IncidentLogEvent; -import com.taobao.tddl.dbsync.binlog.event.IntvarLogEvent; -import com.taobao.tddl.dbsync.binlog.event.LoadLogEvent; -import com.taobao.tddl.dbsync.binlog.event.LogHeader; -import com.taobao.tddl.dbsync.binlog.event.PreviousGtidsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.QueryLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RandLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RotateLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RowsQueryLogEvent; -import com.taobao.tddl.dbsync.binlog.event.StartLogEventV3; -import com.taobao.tddl.dbsync.binlog.event.StopLogEvent; -import com.taobao.tddl.dbsync.binlog.event.TableMapLogEvent; -import com.taobao.tddl.dbsync.binlog.event.TransactionContextLogEvent; -import com.taobao.tddl.dbsync.binlog.event.UnknownLogEvent; -import com.taobao.tddl.dbsync.binlog.event.UpdateRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.UserVarLogEvent; -import com.taobao.tddl.dbsync.binlog.event.ViewChangeEvent; -import com.taobao.tddl.dbsync.binlog.event.WriteRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.XaPrepareLogEvent; -import com.taobao.tddl.dbsync.binlog.event.XidLogEvent; -import com.taobao.tddl.dbsync.binlog.event.mariadb.AnnotateRowsEvent; 
-import com.taobao.tddl.dbsync.binlog.event.mariadb.BinlogCheckPointLogEvent; -import com.taobao.tddl.dbsync.binlog.event.mariadb.MariaGtidListLogEvent; -import com.taobao.tddl.dbsync.binlog.event.mariadb.MariaGtidLogEvent; -import com.taobao.tddl.dbsync.binlog.event.mariadb.StartEncryptionLogEvent; +import com.taobao.tddl.dbsync.binlog.event.*; +import com.taobao.tddl.dbsync.binlog.event.mariadb.*; /** * Implements a binary-log decoder. @@ -87,6 +57,8 @@ public final void handle(final int flagIndex) { handleSet.set(flagIndex); } + private LogBuffer compressIterateBuffer; + /** * Decoding an event from binary-log buffer. * @@ -95,7 +67,6 @@ public final void handle(final int flagIndex) { */ public LogEvent decode(LogBuffer buffer, LogContext context) throws IOException { final int limit = buffer.limit(); - if (limit >= FormatDescriptionLogEvent.LOG_EVENT_HEADER_LEN) { LogHeader header = new LogHeader(buffer, context.getFormatDescription()); @@ -140,6 +111,59 @@ public LogEvent decode(LogBuffer buffer, LogContext context) throws IOException return null; } + /** + * * process compress binlog payload + * + * @param event + * @param context + * @return + * @throws IOException + */ + public List processIterateDecode(LogEvent event, LogContext context) throws IOException { + List events = Lists.newArrayList(); + if (event.getHeader().getType() == LogEvent.TRANSACTION_PAYLOAD_EVENT) { + // iterate for compresss payload + TransactionPayloadLogEvent compressEvent = ((TransactionPayloadLogEvent) event); + LogBuffer iterateBuffer = null; + if (compressEvent.isCompressByZstd()) { + try (ZstdCompressorInputStream in = new ZstdCompressorInputStream( + new ByteArrayInputStream(compressEvent.getPayload()))) { + byte[] decodeBytes = IOUtils.toByteArray(in); + iterateBuffer = new LogBuffer(decodeBytes, 0, decodeBytes.length); + } + } else if (compressEvent.isCompressByNone()) { + iterateBuffer = new LogBuffer(compressEvent.getPayload(), 0, compressEvent.getPayload().length); + } 
else { + throw new IllegalArgumentException("unknow compress type for " + event.getHeader().getLogFileName() + + ":" + event.getHeader().getLogPos()); + } + + try { + context.setIterateDecode(true); + while (iterateBuffer.hasRemaining()) {// iterate + LogEvent deEvent = decode(iterateBuffer, context); + if (deEvent == null) { + break; + } + + // compress event logPos = 0 + deEvent.getHeader().setLogFileName(event.getHeader().getLogFileName()); + deEvent.getHeader().setLogPos(event.getHeader().getLogPos()); + // 需要重置payload每个event的eventLen , ack位点更新依赖logPos - eventLen, + // 原因:每个payload都是uncompress的eventLen,无法对应物理binlog的eventLen + // 隐患:memory计算空间大小时会出现放大的情况,影响getBatch的数量 + deEvent.getHeader().setEventLen(event.getHeader().getEventLen()); + events.add(deEvent); + } + } finally { + context.setIterateDecode(false); + } + } else { + // TODO support mariadb compress binlog + } + return events; + } + /** * Deserialize an event from buffer. * @@ -158,8 +182,12 @@ public static LogEvent decode(LogBuffer buffer, LogHeader header, LogContext con } if (checksumAlg != LogEvent.BINLOG_CHECKSUM_ALG_OFF && checksumAlg != LogEvent.BINLOG_CHECKSUM_ALG_UNDEF) { - // remove checksum bytes - buffer.limit(header.getEventLen() - LogEvent.BINLOG_CHECKSUM_LEN); + if (context.isIterateDecode()) { + // transaction compress payload在主事件已经处理了checksum,遍历解析event忽略checksum处理 + } else { + // remove checksum bytes + buffer.limit(header.getEventLen() - LogEvent.BINLOG_CHECKSUM_LEN); + } } GTIDSet gtidSet = context.getGtidSet(); LogEvent gtidLogEvent = context.getGtidLogEvent(); @@ -228,8 +256,9 @@ public static LogEvent decode(LogBuffer buffer, LogHeader header, LogContext con } case LogEvent.SLAVE_EVENT: /* can never happen (unused event) */ { - if (logger.isWarnEnabled()) logger.warn("Skipping unsupported SLAVE_EVENT from: " - + context.getLogPosition()); + if (logger.isWarnEnabled()) { + logger.warn("Skipping unsupported SLAVE_EVENT from: " + context.getLogPosition()); + } break; } case 
LogEvent.CREATE_FILE_EVENT: { @@ -295,22 +324,25 @@ public static LogEvent decode(LogBuffer buffer, LogHeader header, LogContext con return descriptionEvent; } case LogEvent.PRE_GA_WRITE_ROWS_EVENT: { - if (logger.isWarnEnabled()) logger.warn("Skipping unsupported PRE_GA_WRITE_ROWS_EVENT from: " - + context.getLogPosition()); + if (logger.isWarnEnabled()) { + logger.warn("Skipping unsupported PRE_GA_WRITE_ROWS_EVENT from: " + context.getLogPosition()); + } // ev = new Write_rows_log_event_old(buf, event_len, // description_event); break; } case LogEvent.PRE_GA_UPDATE_ROWS_EVENT: { - if (logger.isWarnEnabled()) logger.warn("Skipping unsupported PRE_GA_UPDATE_ROWS_EVENT from: " - + context.getLogPosition()); + if (logger.isWarnEnabled()) { + logger.warn("Skipping unsupported PRE_GA_UPDATE_ROWS_EVENT from: " + context.getLogPosition()); + } // ev = new Update_rows_log_event_old(buf, event_len, // description_event); break; } case LogEvent.PRE_GA_DELETE_ROWS_EVENT: { - if (logger.isWarnEnabled()) logger.warn("Skipping unsupported PRE_GA_DELETE_ROWS_EVENT from: " - + context.getLogPosition()); + if (logger.isWarnEnabled()) { + logger.warn("Skipping unsupported PRE_GA_DELETE_ROWS_EVENT from: " + context.getLogPosition()); + } // ev = new Delete_rows_log_event_old(buf, event_len, // description_event); break; @@ -386,6 +418,12 @@ public static LogEvent decode(LogBuffer buffer, LogHeader header, LogContext con logPosition.position = header.getLogPos(); return event; } + case LogEvent.TRANSACTION_PAYLOAD_EVENT: { + TransactionPayloadLogEvent event = new TransactionPayloadLogEvent(header, buffer, descriptionEvent); + /* updating position in context */ + logPosition.position = header.getLogPos(); + return event; + } case LogEvent.VIEW_CHANGE_EVENT: { ViewChangeEvent event = new ViewChangeEvent(header, buffer, descriptionEvent); /* updating position in context */ @@ -409,6 +447,7 @@ public static LogEvent decode(LogBuffer buffer, LogHeader header, LogContext con 
BinlogCheckPointLogEvent event = new BinlogCheckPointLogEvent(header, buffer, descriptionEvent); /* updating position in context */ logPosition.position = header.getLogPos(); + logPosition.fileName = event.getFilename(); return event; } case LogEvent.GTID_EVENT: { @@ -443,6 +482,45 @@ public static LogEvent decode(LogBuffer buffer, LogHeader header, LogContext con logPosition.position = header.getLogPos(); return event; } + case LogEvent.HEARTBEAT_LOG_EVENT_V2: { + HeartbeatV2LogEvent event = new HeartbeatV2LogEvent(header, buffer, descriptionEvent); + /* updating position in context */ + logPosition.position = header.getLogPos(); + return event; + } + case LogEvent.QUERY_COMPRESSED_EVENT: { + QueryCompressedLogEvent event = new QueryCompressedLogEvent(header, buffer, descriptionEvent); + /* updating position in context */ + logPosition.position = header.getLogPos(); + return event; + } + case LogEvent.WRITE_ROWS_COMPRESSED_EVENT_V1: + case LogEvent.WRITE_ROWS_COMPRESSED_EVENT: { + WriteRowsCompressLogEvent event = new WriteRowsCompressLogEvent(header, buffer, descriptionEvent); + /* updating position in context */ + logPosition.position = header.getLogPos(); + event.fillTable(context); + header.putGtid(context.getGtidSet(), gtidLogEvent); + return event; + } + case LogEvent.UPDATE_ROWS_COMPRESSED_EVENT_V1: + case LogEvent.UPDATE_ROWS_COMPRESSED_EVENT: { + UpdateRowsCompressLogEvent event = new UpdateRowsCompressLogEvent(header, buffer, descriptionEvent); + /* updating position in context */ + logPosition.position = header.getLogPos(); + event.fillTable(context); + header.putGtid(context.getGtidSet(), gtidLogEvent); + return event; + } + case LogEvent.DELETE_ROWS_COMPRESSED_EVENT_V1: + case LogEvent.DELETE_ROWS_COMPRESSED_EVENT: { + DeleteRowsCompressLogEvent event = new DeleteRowsCompressLogEvent(header, buffer, descriptionEvent); + /* updating position in context */ + logPosition.position = header.getLogPos(); + event.fillTable(context); + 
header.putGtid(context.getGtidSet(), gtidLogEvent); + return event; + } default: /* * Create an object of Ignorable_log_event for unrecognized diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogEvent.java index 412085169e..ace5f2fdac 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/LogEvent.java @@ -175,6 +175,14 @@ public abstract class LogEvent { */ public static final int PARTIAL_UPDATE_ROWS_EVENT = 39; + /* mysql 8.0.20 */ + public static final int TRANSACTION_PAYLOAD_EVENT = 40; + + /* mysql 8.0.26 */ + public static final int HEARTBEAT_LOG_EVENT_V2 = 41; + + public static final int MYSQL_ENUM_END_EVENT = 42; + // mariaDb 5.5.34 /* New MySQL/Sun events are to be added right above this comment */ public static final int MYSQL_EVENTS_END = 49; @@ -205,8 +213,23 @@ public abstract class LogEvent { public static final int START_ENCRYPTION_EVENT = 164; + // mariadb 10.10.1 + /* + * Compressed binlog event. Note that the order between WRITE/UPDATE/DELETE + * events is significant; this is so that we can convert from the compressed to + * the uncompressed event type with (type-WRITE_ROWS_COMPRESSED_EVENT + + * WRITE_ROWS_EVENT) and similar for _V1. 
+ */ + public static final int QUERY_COMPRESSED_EVENT = 165; + public static final int WRITE_ROWS_COMPRESSED_EVENT_V1 = 166; + public static final int UPDATE_ROWS_COMPRESSED_EVENT_V1 = 167; + public static final int DELETE_ROWS_COMPRESSED_EVENT_V1 = 168; + public static final int WRITE_ROWS_COMPRESSED_EVENT = 169; + public static final int UPDATE_ROWS_COMPRESSED_EVENT = 170; + public static final int DELETE_ROWS_COMPRESSED_EVENT = 171; + /** end marker */ - public static final int ENUM_END_EVENT = 165; + public static final int ENUM_END_EVENT = 171; /** * 1 byte length, 1 byte format Length is total length in bytes, including 2 @@ -283,6 +306,9 @@ public abstract class LogEvent { public static final int MYSQL_TYPE_TIMESTAMP2 = 17; public static final int MYSQL_TYPE_DATETIME2 = 18; public static final int MYSQL_TYPE_TIME2 = 19; + public static final int MYSQL_TYPE_TYPED_ARRAY = 20; + public static final int MYSQL_TYPE_INVALID = 243; + public static final int MYSQL_TYPE_BOOL = 244; public static final int MYSQL_TYPE_JSON = 245; public static final int MYSQL_TYPE_NEWDECIMAL = 246; public static final int MYSQL_TYPE_ENUM = 247; @@ -350,6 +376,7 @@ public static String getTypeName(final int type) { case INCIDENT_EVENT: return "Incident"; case HEARTBEAT_LOG_EVENT: + case HEARTBEAT_LOG_EVENT_V2: return "Heartbeat"; case IGNORABLE_LOG_EVENT: return "Ignorable"; @@ -369,8 +396,16 @@ public static String getTypeName(final int type) { return "Previous_gtids"; case PARTIAL_UPDATE_ROWS_EVENT: return "Update_rows_partial"; + case TRANSACTION_CONTEXT_EVENT : + return "Transaction_context"; + case VIEW_CHANGE_EVENT : + return "view_change"; + case XA_PREPARE_LOG_EVENT : + return "Xa_prepare"; + case TRANSACTION_PAYLOAD_EVENT : + return "transaction_payload"; default: - return "Unknown"; /* impossible */ + return "Unknown type:" + type; } } diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/NameCache.java 
b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/NameCache.java new file mode 100644 index 0000000000..e10a8bebeb --- /dev/null +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/NameCache.java @@ -0,0 +1 @@ +package com.taobao.tddl.dbsync.binlog; import java.nio.charset.Charset; /** * string cache * @since 1.1.7 */ public class NameCache { static final NameCacheEntry[] NAME_CACHE = new NameCacheEntry[8192]; static final NameCacheEntry2[] NAME_CACHE2 = new NameCacheEntry2[8192]; static final class NameCacheEntry { final String name; final long value; public NameCacheEntry(String name, long value){ this.name = name; this.value = value; } } static final class NameCacheEntry2 { final String name; final long value0; final long value1; public NameCacheEntry2(String name, long value0, long value1){ this.name = name; this.value0 = value0; this.value1 = value1; } } static String name(byte[] buf, int from, int length, Charset charset) { long nameValue0 = -1, nameValue1 = -1; switch (length) { case 1: nameValue0 = buf[from]; break; case 2: nameValue0 = (buf[from + 1] << 8) + (buf[from] & 0xFF); break; case 3: nameValue0 = (buf[from + 2] << 16) + ((buf[from + 1] & 0xFF) << 8) + (buf[from] & 0xFF); break; case 4: nameValue0 = (buf[from + 3] << 24) + ((buf[from + 2] & 0xFF) << 16) + ((buf[from + 1] & 0xFF) << 8) + (buf[from] & 0xFF); break; case 5: nameValue0 = (((long) buf[from + 4]) << 32) + ((buf[from + 3] & 0xFFL) << 24) + ((buf[from + 2] & 0xFFL) << 16) + ((buf[from + 1] & 0xFFL) << 8) + (buf[from] & 0xFFL); break; case 6: nameValue0 = (((long) buf[from + 5]) << 40) + ((buf[from + 4] & 0xFFL) << 32) + ((buf[from + 3] & 0xFFL) << 24) + ((buf[from + 2] & 0xFFL) << 16) + ((buf[from + 1] & 0xFFL) << 8) + (buf[from] & 0xFFL); break; case 7: nameValue0 = (((long) buf[from + 6]) << 48) + ((buf[from + 5] & 0xFFL) << 40) + ((buf[from + 4] & 0xFFL) << 32) + ((buf[from + 3] & 0xFFL) << 24) + ((buf[from + 2] & 0xFFL) << 16) + ((buf[from + 1] & 0xFFL) << 8) + (buf[from] & 
0xFFL); break; case 8: nameValue0 = (((long) buf[from + 7]) << 56) + ((buf[from + 6] & 0xFFL) << 48) + ((buf[from + 5] & 0xFFL) << 40) + ((buf[from + 4] & 0xFFL) << 32) + ((buf[from + 3] & 0xFFL) << 24) + ((buf[from + 2] & 0xFFL) << 16) + ((buf[from + 1] & 0xFFL) << 8) + (buf[from] & 0xFFL); break; case 9: nameValue0 = buf[from]; nameValue1 = (((long) buf[from + 8]) << 56) + ((buf[from + 7] & 0xFFL) << 48) + ((buf[from + 6] & 0xFFL) << 40) + ((buf[from + 5] & 0xFFL) << 32) + ((buf[from + 4] & 0xFFL) << 24) + ((buf[from + 3] & 0xFFL) << 16) + ((buf[from + 2] & 0xFFL) << 8) + (buf[from + 1] & 0xFFL); break; case 10: nameValue0 = (buf[from + 1] << 8) + (buf[from]); nameValue1 = (((long) buf[from + 9]) << 56) + ((buf[from + 8] & 0xFFL) << 48) + ((buf[from + 7] & 0xFFL) << 40) + ((buf[from + 6] & 0xFFL) << 32) + ((buf[from + 5] & 0xFFL) << 24) + ((buf[from + 4] & 0xFFL) << 16) + ((buf[from + 3] & 0xFFL) << 8) + (buf[from + 2] & 0xFFL); break; case 11: nameValue0 = (buf[from + 2] << 16) + (buf[from + 1] << 8) + (buf[from]); nameValue1 = (((long) buf[from + 10]) << 56) + ((buf[from + 9] & 0xFFL) << 48) + ((buf[from + 8] & 0xFFL) << 40) + ((buf[from + 7] & 0xFFL) << 32) + ((buf[from + 6] & 0xFFL) << 24) + ((buf[from + 5] & 0xFFL) << 16) + ((buf[from + 4] & 0xFFL) << 8) + (buf[from + 3] & 0xFFL); break; case 12: nameValue0 = (buf[from + 3] << 24) + (buf[from + 2] << 16) + (buf[from + 1] << 8) + (buf[from]); nameValue1 = (((long) buf[from + 11]) << 56) + ((buf[from + 10] & 0xFFL) << 48) + ((buf[from + 9] & 0xFFL) << 40) + ((buf[from + 8] & 0xFFL) << 32) + ((buf[from + 7] & 0xFFL) << 24) + ((buf[from + 6] & 0xFFL) << 16) + ((buf[from + 5] & 0xFFL) << 8) + (buf[from + 4] & 0xFFL); break; case 13: nameValue0 = (((long) buf[from + 4]) << 32) + (((long) buf[from + 3]) << 24) + (((long) buf[from + 2]) << 16) + (((long) buf[from + 1]) << 8) + ((long) buf[from]); nameValue1 = (((long) buf[from + 12]) << 56) + ((buf[from + 11] & 0xFFL) << 48) + ((buf[from + 10] & 0xFFL) << 40) + 
((buf[from + 9] & 0xFFL) << 32) + ((buf[from + 8] & 0xFFL) << 24) + ((buf[from + 7] & 0xFFL) << 16) + ((buf[from + 6] & 0xFFL) << 8) + (buf[from + 5] & 0xFFL); break; case 14: nameValue0 = (((long) buf[from + 5]) << 40) + ((buf[from + 4] & 0xFFL) << 32) + ((buf[from + 3] & 0xFFL) << 24) + ((buf[from + 2] & 0xFFL) << 16) + ((buf[from + 1] & 0xFFL) << 8) + (buf[from] & 0xFFL); nameValue1 = (((long) buf[from + 13]) << 56) + ((buf[from + 12] & 0xFFL) << 48) + ((buf[from + 11] & 0xFFL) << 40) + ((buf[from + 10] & 0xFFL) << 32) + ((buf[from + 9] & 0xFFL) << 24) + ((buf[from + 8] & 0xFFL) << 16) + ((buf[from + 7] & 0xFFL) << 8) + (buf[from + 6] & 0xFFL); break; case 15: nameValue0 = (((long) buf[from + 6]) << 48) + ((buf[from + 5] & 0xFFL) << 40) + ((buf[from + 4] & 0xFFL) << 32) + ((buf[from + 3] & 0xFFL) << 24) + ((buf[from + 2] & 0xFFL) << 16) + ((buf[from + 1] & 0xFFL) << 8) + (buf[from] & 0xFFL); nameValue1 = (((long) buf[from + 14]) << 56) + ((buf[from + 13] & 0xFFL) << 48) + ((buf[from + 12] & 0xFFL) << 40) + ((buf[from + 11] & 0xFFL) << 32) + ((buf[from + 10] & 0xFFL) << 24) + ((buf[from + 9] & 0xFFL) << 16) + ((buf[from + 8] & 0xFFL) << 8) + (buf[from + 7] & 0xFFL); break; case 16: nameValue0 = (((long) buf[from + 7]) << 56) + ((buf[from + 6] & 0xFFL) << 48) + ((buf[from + 5] & 0xFFL) << 40) + ((buf[from + 4] & 0xFFL) << 32) + ((buf[from + 3] & 0xFFL) << 24) + ((buf[from + 2] & 0xFFL) << 16) + ((buf[from + 1] & 0xFFL) << 8) + (buf[from] & 0xFFL); nameValue1 = (((long) buf[from + 15]) << 56) + ((buf[from + 14] & 0xFFL) << 48) + ((buf[from + 13] & 0xFFL) << 40) + ((buf[from + 12] & 0xFFL) << 32) + ((buf[from + 11] & 0xFFL) << 24) + ((buf[from + 10] & 0xFFL) << 16) + ((buf[from + 9] & 0xFFL) << 8) + (buf[from + 8] & 0xFFL); break; default: break; } if (nameValue0 != -1) { if (nameValue1 != -1) { int indexMask = ((int) nameValue1) & (NameCache.NAME_CACHE2.length - 1); NameCache.NameCacheEntry2 entry = NameCache.NAME_CACHE2[indexMask]; if (entry == null) { String name 
= new String(buf, from, length, charset); NameCache.NAME_CACHE2[indexMask] = new NameCacheEntry2(name, nameValue0, nameValue1); return name; } else if (entry.value0 == nameValue0 && entry.value1 == nameValue1) { return entry.name; } } else { int indexMask = ((int) nameValue0) & (NAME_CACHE.length - 1); NameCacheEntry entry = NAME_CACHE[indexMask]; if (entry == null) { String name = new String(buf, from, length, charset); NAME_CACHE[indexMask] = new NameCacheEntry(name, nameValue0); return name; } else if (entry.value == nameValue0) { return entry.name; } } } return null; } } \ No newline at end of file diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/DeleteRowsLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/DeleteRowsLogEvent.java index 4f06a97b53..09ab15ed8e 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/DeleteRowsLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/DeleteRowsLogEvent.java @@ -9,9 +9,14 @@ * @author Changyuan.lh * @version 1.0 */ -public final class DeleteRowsLogEvent extends RowsLogEvent { +public class DeleteRowsLogEvent extends RowsLogEvent { public DeleteRowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent){ - super(header, buffer, descriptionEvent); + super(header, buffer, descriptionEvent, false, false); + } + + public DeleteRowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent, + boolean compress){ + super(header, buffer, descriptionEvent, false, compress); } } diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/FormatDescriptionLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/FormatDescriptionLogEvent.java index a14b9111f6..ca185c1901 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/FormatDescriptionLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/FormatDescriptionLogEvent.java @@ 
-62,6 +62,7 @@ public final class FormatDescriptionLogEvent extends StartLogEventV3 { public static final int TRANSACTION_CONTEXT_HEADER_LEN = 18; public static final int VIEW_CHANGE_HEADER_LEN = 52; public static final int XA_PREPARE_HEADER_LEN = 0; + public static final int TRANSACTION_PAYLOAD_HEADER_LEN = 0; public static final int ANNOTATE_ROWS_HEADER_LEN = 0; public static final int BINLOG_CHECKPOINT_HEADER_LEN = 4; @@ -113,14 +114,14 @@ public FormatDescriptionLogEvent(LogHeader header, LogBuffer buffer, FormatDescr long calc = getVersionProduct(); if (calc >= checksumVersionProduct) { /* - * the last bytes are the checksum alg desc and value (or value's - * room) + * the last bytes are the checksum alg desc and value (or value's room) */ numberOfEventTypes -= BINLOG_CHECKSUM_ALG_DESC_LEN; } - if (logger.isInfoEnabled()) logger.info("common_header_len= " + commonHeaderLen + ", number_of_event_types= " - + numberOfEventTypes); + if (logger.isInfoEnabled()) { + logger.info("common_header_len= " + commonHeaderLen + ", number_of_event_types= " + numberOfEventTypes); + } } /** MySQL 5.0 format descriptions. 
*/ @@ -212,6 +213,7 @@ public FormatDescriptionLogEvent(final int binlogVersion){ postHeaderLen[VIEW_CHANGE_EVENT - 1] = VIEW_CHANGE_HEADER_LEN; postHeaderLen[XA_PREPARE_LOG_EVENT - 1] = XA_PREPARE_HEADER_LEN; postHeaderLen[PARTIAL_UPDATE_ROWS_EVENT - 1] = ROWS_HEADER_LEN_V2; + postHeaderLen[TRANSACTION_PAYLOAD_EVENT - 1] = TRANSACTION_PAYLOAD_HEADER_LEN; // mariadb 10 postHeaderLen[ANNOTATE_ROWS_EVENT - 1] = ANNOTATE_ROWS_HEADER_LEN; @@ -219,6 +221,15 @@ public FormatDescriptionLogEvent(final int binlogVersion){ postHeaderLen[GTID_EVENT - 1] = GTID_HEADER_LEN; postHeaderLen[GTID_LIST_EVENT - 1] = GTID_LIST_HEADER_LEN; postHeaderLen[START_ENCRYPTION_EVENT - 1] = START_ENCRYPTION_HEADER_LEN; + + // mariadb compress + postHeaderLen[QUERY_COMPRESSED_EVENT - 1] = QUERY_COMPRESSED_EVENT; + postHeaderLen[WRITE_ROWS_COMPRESSED_EVENT - 1] = ROWS_HEADER_LEN_V2; + postHeaderLen[UPDATE_ROWS_COMPRESSED_EVENT - 1] = ROWS_HEADER_LEN_V2; + postHeaderLen[DELETE_ROWS_COMPRESSED_EVENT - 1] = ROWS_HEADER_LEN_V2; + postHeaderLen[WRITE_ROWS_COMPRESSED_EVENT_V1 - 1] = ROWS_HEADER_LEN_V1; + postHeaderLen[UPDATE_ROWS_COMPRESSED_EVENT_V1 - 1] = ROWS_HEADER_LEN_V1; + postHeaderLen[DELETE_ROWS_COMPRESSED_EVENT_V1 - 1] = ROWS_HEADER_LEN_V1; break; case 3: /* 4.0.x x>=2 */ diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/GtidLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/GtidLogEvent.java index 49557fc68a..bafd1d076e 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/GtidLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/GtidLogEvent.java @@ -18,12 +18,13 @@ public class GtidLogEvent extends LogEvent { // / Length of SID in event encoding public static final int ENCODED_SID_LENGTH = 16; public static final int LOGICAL_TIMESTAMP_TYPE_CODE = 2; + public static final UUID UUID_ZERO = new UUID(0, 0); private boolean commitFlag; private UUID sid; private long gno; - private Long lastCommitted; - private Long 
sequenceNumber; + private long lastCommitted; + private long sequenceNumber; public GtidLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent){ super(header); @@ -35,11 +36,13 @@ public GtidLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEven buffer.position(commonHeaderLen); commitFlag = (buffer.getUint8() != 0); // ENCODED_FLAG_LENGTH - byte[] bs = buffer.getData(ENCODED_SID_LENGTH); - ByteBuffer bb = ByteBuffer.wrap(bs); - long high = bb.getLong(); - long low = bb.getLong(); - sid = new UUID(high, low); + long high = buffer.getBeLong64(); + long low = buffer.getBeLong64(); + if (high == 0 && low == 0) { + sid = UUID_ZERO; + } else { + sid = new UUID(high, low); + } gno = buffer.getLong64(); @@ -73,11 +76,11 @@ public long getGno() { return gno; } - public Long getLastCommitted() { + public long getLastCommitted() { return lastCommitted; } - public Long getSequenceNumber() { + public long getSequenceNumber() { return sequenceNumber; } diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/HeartbeatLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/HeartbeatLogEvent.java index f3948d7e1d..ea8c519e6c 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/HeartbeatLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/HeartbeatLogEvent.java @@ -3,6 +3,8 @@ import com.taobao.tddl.dbsync.binlog.LogBuffer; import com.taobao.tddl.dbsync.binlog.LogEvent; +import java.nio.charset.StandardCharsets; + /** *
  * Replication event to ensure to slave that master is alive.
@@ -35,7 +37,7 @@ public HeartbeatLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLo
             identLen = FN_REFLEN - 1;
         }
 
-        logIdent = buffer.getFullString(commonHeaderLen, identLen, LogBuffer.ISO_8859_1);
+        logIdent = buffer.getFullString(commonHeaderLen, identLen, StandardCharsets.ISO_8859_1);
     }
 
     public int getIdentLen() {
diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/HeartbeatV2LogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/HeartbeatV2LogEvent.java
new file mode 100644
index 0000000000..e6619c7ccb
--- /dev/null
+++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/HeartbeatV2LogEvent.java
@@ -0,0 +1,40 @@
+package com.taobao.tddl.dbsync.binlog.event;
+
+import com.taobao.tddl.dbsync.binlog.LogBuffer;
+import com.taobao.tddl.dbsync.binlog.LogEvent;
+
+/**
+ * 
+ *   Replication event to ensure to replica that source is alive.
+ *   The event is originated by source's dump thread and sent straight to
+ *   replica without being logged. Slave itself does not store it in relay log
+ *   but rather uses a data for immediate checks and throws away the event.
+ *   Two members of the class m_log_filename and m_log_position comprise
+ *   @see the rpl_event_coordinates instance. The coordinates that a heartbeat
+ *   instance carries correspond to the last event source has sent from
+ *   its binlog.
+ *   Also this event will be generated only for the source server with
+ *   version > 8.0.26
+ * 
+ * + * @author jianghang 2022-09-01 下午16:36:29 + * @version 1.1.6 + * @since mysql 8.0.26 + */ +public class HeartbeatV2LogEvent extends LogEvent { + + private byte[] payload; + + public HeartbeatV2LogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent){ + super(header); + + final int commonHeaderLen = descriptionEvent.commonHeaderLen; + int payloadLenth = buffer.limit() - commonHeaderLen; + // see : https://github.com/mysql/mysql-server/commit/59e590738a772b74ad50c4a57c86aaa1bc6501c7#diff-184e9a7d8a58f080974e475d4199fe5c6da5518c8a2811cc5df5988c8f9e9797 + payload = buffer.getData(payloadLenth); + } + + public byte[] getPayload() { + return payload; + } +} diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/IncidentLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/IncidentLogEvent.java index f0c03e5645..f1a4b73d9c 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/IncidentLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/IncidentLogEvent.java @@ -67,7 +67,7 @@ public IncidentLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLog incident = incidentNumber; buffer.position(commonHeaderLen + postHeaderLen); - message = buffer.getString(); + message = buffer.getName(); } public final int getIncident() { diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/LogHeader.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/LogHeader.java index c6aaa968bf..a7de734cfb 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/LogHeader.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/LogHeader.java @@ -1,12 +1,12 @@ package com.taobao.tddl.dbsync.binlog.event; +import java.util.HashMap; +import java.util.Map; + import com.alibaba.otter.canal.parse.driver.mysql.packets.GTIDSet; import com.taobao.tddl.dbsync.binlog.LogBuffer; import com.taobao.tddl.dbsync.binlog.LogEvent; -import 
java.util.HashMap; -import java.util.Map; - /** * The Common-Header, documented in the table @ref Table_common_header "below", * always has the same form and length within one version of MySQL. Each event @@ -66,7 +66,7 @@ */ public final class LogHeader { - protected final int type; + protected int type; /** * The offset in the log where this event originally appeared (it is @@ -293,6 +293,12 @@ public String getLogFileName() { public void setLogFileName(String logFileName) { this.logFileName = logFileName; } + public void setLogPos(long logPos) { + this.logPos = logPos; + } + public void setEventLen(int eventLen) { + this.eventLen = eventLen; + } private void processCheckSum(LogBuffer buffer) { if (checksumAlg != LogEvent.BINLOG_CHECKSUM_ALG_OFF && checksumAlg != LogEvent.BINLOG_CHECKSUM_ALG_UNDEF) { @@ -320,7 +326,7 @@ public void putGtid(GTIDSet gtidSet, LogEvent gtidEvent) { if (gtidSet != null) { gtidMap.put(GTID_SET_STRING, gtidSet.toString()); if (gtidEvent != null && gtidEvent instanceof GtidLogEvent) { - GtidLogEvent event = (GtidLogEvent)gtidEvent; + GtidLogEvent event = (GtidLogEvent) gtidEvent; gtidMap.put(CURRENT_GTID_STRING, event.getGtidStr()); gtidMap.put(CURRENT_GTID_SN, String.valueOf(event.getSequenceNumber())); gtidMap.put(CURRENT_GTID_LAST_COMMIT, String.valueOf(event.getLastCommitted())); diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/QueryLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/QueryLogEvent.java index 8cc49795f6..4062064a9c 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/QueryLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/QueryLogEvent.java @@ -433,14 +433,18 @@ public class QueryLogEvent extends LogEvent { private int serverCollation = -1; private int tvSec = -1; private BigInteger ddlXid = BigInteger.valueOf(-1L); - private String charsetName; + private Charset charset; private String timezone; public QueryLogEvent(LogHeader header, 
LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent) - throws IOException{ - super(header); + throws IOException{ + this(header, buffer, descriptionEvent, false); + } + public QueryLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent, + boolean compress) throws IOException{ + super(header); final int commonHeaderLen = descriptionEvent.commonHeaderLen; final int postHeaderLen = descriptionEvent.postHeaderLen[header.type - 1]; /* @@ -495,15 +499,20 @@ public QueryLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEve unpackVariables(buffer, end); buffer.position(end); buffer.limit(limit); - /* A 2nd variable part; this is common to all versions */ - final int queryLen = dataLen - dbLen - 1; - dbname = buffer.getFixString(dbLen + 1); + dbname = buffer.getFixName(dbLen + 1); + int queryLen = dataLen - dbLen - 1; + if (compress) { + // mariadb compress log event + // see https://github.com/alibaba/canal/issues/4388 + buffer = buffer.uncompressBuf(); + queryLen = buffer.limit(); + } if (clientCharset >= 0) { - charsetName = CharsetConversion.getJavaCharset(clientCharset); + charset = CharsetConversion.getNioCharset(clientCharset); - if ((charsetName != null) && (Charset.isSupported(charsetName))) { - query = buffer.getFixString(queryLen, charsetName); + if (charset != null) { + query = buffer.getFixString(queryLen, charset); } else { logger.warn("unsupported character set in query log: " + "\n ID = " + clientCharset + ", Charset = " + CharsetConversion.getCharset(clientCharset) + ", Collation = " @@ -603,6 +612,13 @@ public QueryLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEve */ public static final int Q_HRNOW = 128; + /** + * Support MariaDB 10.10.1 + */ + public static final int Q_XID = 129; + + public static final int Q_GTID_FLAGS3 = 130; + private final void unpackVariables(LogBuffer buffer, final int end) throws IOException { int code = -1; try { @@ -613,10 +629,10 @@ private final void 
unpackVariables(LogBuffer buffer, final int end) throws IOExc break; case Q_SQL_MODE_CODE: sql_mode = buffer.getLong64(); // QQ: Fix when sql_mode - // is ulonglong + // is ulonglong break; case Q_CATALOG_NZ_CODE: - catalog = buffer.getString(); + catalog = buffer.getName(); break; case Q_AUTO_INCREMENT: autoIncrementIncrement = buffer.getUint16(); @@ -632,7 +648,7 @@ private final void unpackVariables(LogBuffer buffer, final int end) throws IOExc serverCollation = buffer.getUint16(); break; case Q_TIME_ZONE_CODE: - timezone = buffer.getString(); + timezone = buffer.getName(); break; case Q_CATALOG_CODE: /* for 5.0.x where 0<=x<=3 masters */ final int len = buffer.getUint8(); @@ -656,8 +672,8 @@ private final void unpackVariables(LogBuffer buffer, final int end) throws IOExc buffer.forward(4); break; case Q_INVOKER: - user = buffer.getString(); - host = buffer.getString(); + user = buffer.getName(); + host = buffer.getName(); break; case Q_MICROSECONDS: // when.tv_usec= uint3korr(pos); @@ -678,7 +694,7 @@ private final void unpackVariables(LogBuffer buffer, final int end) throws IOExc String mtsAccessedDbNames[] = new String[mtsAccessedDbs]; for (int i = 0; i < mtsAccessedDbs && buffer.position() < end; i++) { int length = end - buffer.position(); - mtsAccessedDbNames[i] = buffer.getFixString(length < NAME_LEN ? length : NAME_LEN); + mtsAccessedDbNames[i] = buffer.getFixName(length < NAME_LEN ? 
length : NAME_LEN); } break; case Q_EXPLICIT_DEFAULTS_FOR_TIMESTAMP: @@ -701,6 +717,25 @@ private final void unpackVariables(LogBuffer buffer, final int end) throws IOExc // int when_sec_part = buffer.getUint24(); buffer.forward(3); break; + case Q_XID: + // xid= uint8korr(pos); + buffer.forward(8); + break; + case Q_GTID_FLAGS3: + // gtid_flags_extra= *pos++; + // if (gtid_flags_extra & (Gtid_log_event::FL_COMMIT_ALTER_E1 | + // Gtid_log_event::FL_ROLLBACK_ALTER_E1)) { + // sa_seq_no = uint8korr(pos); + // pos+= 8; + // } + + int gtid_flags_extra = buffer.getUint8(); + final int FL_COMMIT_ALTER_E1= 4; + final int FL_ROLLBACK_ALTER_E1= 8; + if ((gtid_flags_extra & (FL_COMMIT_ALTER_E1 | FL_ROLLBACK_ALTER_E1))> 0) { + buffer.forward(8); + } + break; default: /* * That's why you must write status vars in growing @@ -740,16 +775,22 @@ private static final String findCodeName(final int code) { return "Q_TABLE_MAP_FOR_UPDATE_CODE"; case Q_MASTER_DATA_WRITTEN_CODE: return "Q_MASTER_DATA_WRITTEN_CODE"; - case Q_UPDATED_DB_NAMES: - return "Q_UPDATED_DB_NAMES"; + case Q_INVOKER: + return "case Q_INVOKER"; case Q_MICROSECONDS: return "Q_MICROSECONDS"; + case Q_UPDATED_DB_NAMES: + return "Q_UPDATED_DB_NAMES"; + case Q_EXPLICIT_DEFAULTS_FOR_TIMESTAMP: + return "Q_EXPLICIT_DEFAULTS_FOR_TIMESTAMP"; case Q_DDL_LOGGED_WITH_XID: return "Q_DDL_LOGGED_WITH_XID"; case Q_DEFAULT_COLLATION_FOR_UTF8MB4: return "Q_DEFAULT_COLLATION_FOR_UTF8MB4"; case Q_SQL_REQUIRE_PRIMARY_KEY: return "Q_SQL_REQUIRE_PRIMARY_KEY"; + case Q_HRNOW: + return "Q_HRNOW"; } return "CODE#" + code; } @@ -797,8 +838,8 @@ public final long getAutoIncrementOffset() { return autoIncrementOffset; } - public final String getCharsetName() { - return charsetName; + public final Charset getCharset() { + return charset; } public final String getTimezone() { diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RotateLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RotateLogEvent.java 
index 6aef177d55..5c7e735a68 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RotateLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RotateLogEvent.java @@ -88,9 +88,10 @@ public RotateLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEv final int filenameOffset = headerSize + postHeaderLen; int filenameLen = buffer.limit() - filenameOffset; - if (filenameLen > FN_REFLEN - 1) filenameLen = FN_REFLEN - 1; + if (filenameLen > FN_REFLEN - 1) { + filenameLen = FN_REFLEN - 1; + } buffer.position(filenameOffset); - filename = buffer.getFixString(filenameLen); } diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RowsLogBuffer.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RowsLogBuffer.java index a27476733d..79335e11d5 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RowsLogBuffer.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RowsLogBuffer.java @@ -1,8 +1,11 @@ package com.taobao.tddl.dbsync.binlog.event; import java.io.Serializable; +import java.nio.charset.Charset; import java.sql.Timestamp; import java.sql.Types; +import java.time.ZoneId; +import java.time.zone.ZoneRules; import java.util.BitSet; import org.apache.commons.logging.Log; @@ -23,7 +26,11 @@ */ public final class RowsLogBuffer { - protected static final Log logger = LogFactory.getLog(RowsLogBuffer.class); + protected static final Log logger = LogFactory.getLog(RowsLogBuffer.class); + public static final Long[] longCache = new Long[1024 * 128]; + public static final int longCacheLimit = longCache.length + 127; + public static final Integer[] integerCache = new Integer[1024 * 128]; + public static final int integerCacheLimit = longCache.length + 127; public static final long DATETIMEF_INT_OFS = 0x8000000000L; public static final long TIMEF_INT_OFS = 0x800000L; @@ -33,7 +40,7 @@ public final class RowsLogBuffer { private final LogBuffer buffer; private final int 
columnLen; private final int jsonColumnCount; - private final String charsetName; + private final Charset charset; private final BitSet nullBits; private int nullBitIndex; @@ -47,10 +54,10 @@ public final class RowsLogBuffer { private int length; private Serializable value; - public RowsLogBuffer(LogBuffer buffer, final int columnLen, String charsetName, int jsonColumnCount, boolean partial){ + public RowsLogBuffer(LogBuffer buffer, final int columnLen, Charset charset, int jsonColumnCount, boolean partial){ this.buffer = buffer; this.columnLen = columnLen; - this.charsetName = charsetName; + this.charset = charset; this.partial = partial; this.jsonColumnCount = jsonColumnCount; this.nullBits = new BitSet(columnLen); @@ -309,7 +316,7 @@ final Serializable fetchValue(String columnName, int columnIndex, int type, fina // XXX: How to check signed / unsigned? // value = unsigned ? Long.valueOf(buffer.getUint32()) : // Integer.valueOf(buffer.getInt32()); - value = Integer.valueOf(buffer.getInt32()); + value = valueOf(buffer.getInt32()); javaType = Types.INTEGER; length = 4; break; @@ -318,7 +325,7 @@ final Serializable fetchValue(String columnName, int columnIndex, int type, fina // XXX: How to check signed / unsigned? // value = Integer.valueOf(unsigned ? buffer.getUint8() : // buffer.getInt8()); - value = Integer.valueOf(buffer.getInt8()); + value = valueOf(buffer.getInt8()); javaType = Types.TINYINT; // java.sql.Types.INTEGER; length = 1; break; @@ -327,7 +334,7 @@ final Serializable fetchValue(String columnName, int columnIndex, int type, fina // XXX: How to check signed / unsigned? // value = Integer.valueOf(unsigned ? 
buffer.getUint16() : // buffer.getInt16()); - value = Integer.valueOf((short) buffer.getInt16()); + value = valueOf((short) buffer.getInt16()); javaType = Types.SMALLINT; // java.sql.Types.INTEGER; length = 2; break; @@ -336,7 +343,7 @@ final Serializable fetchValue(String columnName, int columnIndex, int type, fina // XXX: How to check signed / unsigned? // value = Integer.valueOf(unsigned ? buffer.getUint24() : // buffer.getInt24()); - value = Integer.valueOf(buffer.getInt24()); + value = valueOf(buffer.getInt24()); javaType = Types.INTEGER; length = 3; break; @@ -345,7 +352,7 @@ final Serializable fetchValue(String columnName, int columnIndex, int type, fina // XXX: How to check signed / unsigned? // value = unsigned ? buffer.getUlong64()) : // Long.valueOf(buffer.getLong64()); - value = Long.valueOf(buffer.getLong64()); + value = valueOf(buffer.getLong64()); javaType = Types.BIGINT; // Types.INTEGER; length = 8; break; @@ -733,7 +740,7 @@ final Serializable fetchValue(String columnName, int columnIndex, int type, fina String second = null; if (intpart == 0) { - second = "00:00:00"; + second = frac < 0 ? 
"-00:00:00" : "00:00:00"; } else { // 目前只记录秒,不处理us frac // if (cal == null) cal = Calendar.getInstance(); @@ -870,7 +877,7 @@ final Serializable fetchValue(String columnName, int columnIndex, int type, fina } // logger.warn("MYSQL_TYPE_ENUM : This enumeration value is " // + "only used internally and cannot exist in a binlog!"); - value = Integer.valueOf(int32); + value = valueOf(int32); javaType = Types.INTEGER; length = len; break; @@ -1017,7 +1024,7 @@ final Serializable fetchValue(String columnName, int columnIndex, int type, fina javaType = Types.VARBINARY; value = binary; } else { - value = buffer.getFullString(len, charsetName); + value = buffer.getFullString(len, charset); javaType = Types.VARCHAR; } @@ -1039,7 +1046,7 @@ final Serializable fetchValue(String columnName, int columnIndex, int type, fina javaType = Types.BINARY; value = binary; } else { - value = buffer.getFullString(len, charsetName); + value = buffer.getFullString(len, charset); javaType = Types.CHAR; // Types.VARCHAR; } length = len; @@ -1074,7 +1081,7 @@ final Serializable fetchValue(String columnName, int columnIndex, int type, fina len, columnName, columnIndex, - charsetName); + charset); value = builder.toString(); buffer.position(position + len); } else { @@ -1088,9 +1095,9 @@ final Serializable fetchValue(String columnName, int columnIndex, int type, fina Json_Value jsonValue = JsonConversion.parse_value(buffer.getUint8(), buffer, len - 1, - charsetName); + charset); StringBuilder builder = new StringBuilder(); - jsonValue.toJsonString(builder, charsetName); + jsonValue.toJsonString(builder, charset); value = builder.toString(); buffer.position(position + len); } @@ -1133,6 +1140,8 @@ final Serializable fetchValue(String columnName, int columnIndex, int type, fina length = len; break; } + case LogEvent.MYSQL_TYPE_BOOL : + case LogEvent.MYSQL_TYPE_INVALID : default: logger.error(String.format("!! 
Don't know how to handle column type=%d meta=%d (%04X)", type, @@ -1209,4 +1218,38 @@ public static void appendNumber2(StringBuilder builder, int d) { builder.append('0').append(digits[d]); } } + + public static Long valueOf(long value) { + if (value >= -128 & value <= 127) { + // if (l >= -128 && l <= 127) { + return Long.valueOf(value); + } + if (value > 127 && value < longCacheLimit) { + int cacheIndex = (int) value - 127; + Long cacheValue = longCache[cacheIndex]; + if (cacheValue == null) { + cacheValue = new Long(value); + longCache[cacheIndex] = cacheValue; + } + return cacheValue; + } + return new Long(value); + } + + public static Integer valueOf(int value) { + if (value >= -128 & value <= 127) { + // if (l >= -128 && l <= 127) { + return Integer.valueOf(value); + } + if (value > 127 && value < integerCacheLimit) { + int cacheIndex = (int) value - 127; + Integer cacheValue = integerCache[cacheIndex]; + if (cacheValue == null) { + cacheValue = new Integer(value); + integerCache[cacheIndex] = cacheValue; + } + return cacheValue; + } + return new Integer(value); + } } diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RowsLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RowsLogEvent.java index e1e78cd158..465b56e5f9 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RowsLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RowsLogEvent.java @@ -1,12 +1,13 @@ package com.taobao.tddl.dbsync.binlog.event; +import java.nio.charset.Charset; import java.util.BitSet; -import com.taobao.tddl.dbsync.binlog.exception.TableIdNotFoundException; import com.taobao.tddl.dbsync.binlog.LogBuffer; import com.taobao.tddl.dbsync.binlog.LogContext; import com.taobao.tddl.dbsync.binlog.LogEvent; import com.taobao.tddl.dbsync.binlog.event.TableMapLogEvent.ColumnInfo; +import com.taobao.tddl.dbsync.binlog.exception.TableIdNotFoundException; /** * Common base class for all row-containing log events. 
@@ -117,9 +118,14 @@ public RowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEven this(header, buffer, descriptionEvent, false); } - public RowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent, boolean partial){ - super(header); + public RowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent, + boolean partial){ + this(header, buffer, descriptionEvent, false, false); + } + public RowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent, boolean partial, + boolean compress){ + super(header); final int commonHeaderLen = descriptionEvent.commonHeaderLen; final int postHeaderLen = descriptionEvent.postHeaderLen[header.type - 1]; int headerLen = 0; @@ -166,7 +172,8 @@ public RowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEven columns = buffer.getBitmap(columnLen); if (header.type == UPDATE_ROWS_EVENT_V1 || header.type == UPDATE_ROWS_EVENT - || header.type == PARTIAL_UPDATE_ROWS_EVENT) { + || header.type == PARTIAL_UPDATE_ROWS_EVENT || header.type == UPDATE_ROWS_COMPRESSED_EVENT + || header.type == UPDATE_ROWS_COMPRESSED_EVENT_V1) { changeColumns = buffer.getBitmap(columnLen); } else { changeColumns = columns; @@ -174,6 +181,18 @@ public RowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEven // XXX: Don't handle buffer in another thread. 
int dataSize = buffer.limit() - buffer.position(); + if (compress) { + // mariadb compress log event + // see https://github.com/alibaba/canal/issues/4388 + buffer = buffer.uncompressBuf(); + dataSize = buffer.limit(); + // rewrite type + if (postHeaderLen == FormatDescriptionLogEvent.ROWS_HEADER_LEN_V2) { + header.type = header.type - WRITE_ROWS_COMPRESSED_EVENT + WRITE_ROWS_EVENT; + } else { + header.type = header.type - WRITE_ROWS_COMPRESSED_EVENT_V1 + WRITE_ROWS_EVENT_V1; + } + } rowsBuf = buffer.duplicate(dataSize); } @@ -219,8 +238,8 @@ public final BitSet getChangeColumns() { return changeColumns; } - public final RowsLogBuffer getRowsBuf(String charsetName) { - return new RowsLogBuffer(rowsBuf, columnLen, charsetName, jsonColumnCount, partial); + public final RowsLogBuffer getRowsBuf(Charset charset) { + return new RowsLogBuffer(rowsBuf, columnLen, charset, jsonColumnCount, partial); } public final int getFlags(final int flags) { diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RowsQueryLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RowsQueryLogEvent.java index 16293ae174..d4ba23c683 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RowsQueryLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/RowsQueryLogEvent.java @@ -2,6 +2,8 @@ import com.taobao.tddl.dbsync.binlog.LogBuffer; +import java.nio.charset.StandardCharsets; + /** * @author jianghang 2013-4-8 上午12:36:29 * @version 1.0.3 @@ -23,7 +25,7 @@ public RowsQueryLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLo */ int offset = commonHeaderLen + postHeaderLen + 1; int len = buffer.limit() - offset; - rowsQuery = buffer.getFullString(offset, len, LogBuffer.ISO_8859_1); + rowsQuery = buffer.getFullString(offset, len, StandardCharsets.ISO_8859_1); } public String getRowsQuery() { diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/StartLogEventV3.java 
b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/StartLogEventV3.java index f230759baa..37ed1ad97a 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/StartLogEventV3.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/StartLogEventV3.java @@ -36,7 +36,7 @@ public StartLogEventV3(LogHeader header, LogBuffer buffer, FormatDescriptionLogE buffer.position(descriptionEvent.commonHeaderLen); binlogVersion = buffer.getUint16(); // ST_BINLOG_VER_OFFSET - serverVersion = buffer.getFixString(ST_SERVER_VER_LEN); // ST_SERVER_VER_OFFSET + serverVersion = buffer.getFixName(ST_SERVER_VER_LEN); // ST_SERVER_VER_OFFSET } public StartLogEventV3(){ diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/TableMapLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/TableMapLogEvent.java index 27651c6e6c..ead820ee8b 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/TableMapLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/TableMapLogEvent.java @@ -1,12 +1,11 @@ package com.taobao.tddl.dbsync.binlog.event; +import com.taobao.tddl.dbsync.binlog.LogBuffer; +import com.taobao.tddl.dbsync.binlog.LogEvent; import java.util.ArrayList; import java.util.BitSet; import java.util.List; -import com.taobao.tddl.dbsync.binlog.LogBuffer; -import com.taobao.tddl.dbsync.binlog.LogEvent; - /** * In row-based mode, every row operation event is preceded by a * Table_map_log_event which maps a table definition to a number. 
The table @@ -343,12 +342,14 @@ public static final class ColumnInfo { public int charset; // 可以通过CharsetUtil进行转化 public int geoType; public boolean nullable; - - @Override - public String toString() { - return "ColumnInfo [type=" + type + ", meta=" + meta + ", name=" + name + ", unsigned=" + unsigned - + ", pk=" + pk + ", set_enum_values=" + set_enum_values + ", charset=" + charset + ", geoType=" - + geoType + ", nullable=" + nullable + "]"; + public boolean visibility; + public boolean array; + + @Override public String toString() { + return "ColumnInfo{" + "type=" + type + ", meta=" + meta + ", name='" + name + '\'' + ", unsigned=" + + unsigned + ", pk=" + pk + ", set_enum_values=" + set_enum_values + ", charset=" + charset + + ", geoType=" + geoType + ", nullable=" + nullable + ", visibility=" + visibility + ", array=" + + array + '}'; } } @@ -382,6 +383,12 @@ public String toString() { public static final int SIMPLE_PRIMARY_KEY = 8; // Primary key with prefix public static final int PRIMARY_KEY_WITH_PREFIX = 9; + // Character set of enum and set columns, optimized to minimize space when many columns have the same charset. + public static final int ENUM_AND_SET_DEFAULT_CHARSET = 10; + // Character set of enum and set columns, optimized to minimize space when many columns have the same charset. 
+ public static final int ENUM_AND_SET_COLUMN_CHARSET = 11; + // Flag to indicate column visibility attribute + public static final int COLUMN_VISIBILITY = 12; private int default_charset; private boolean existOptionalMetaData = false; @@ -416,10 +423,10 @@ public TableMapLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLog /* Read the variable part of the event */ buffer.position(commonHeaderLen + postHeaderLen); - dbname = buffer.getString(); + dbname = buffer.getName(); buffer.forward(1); /* termination null */ // fixed issue #2714 - tblname = buffer.getString(); + tblname = buffer.getName(); buffer.forward(1); /* termination null */ // Read column information from buffer @@ -484,6 +491,15 @@ public TableMapLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLog case PRIMARY_KEY_WITH_PREFIX: parse_pk_with_prefix(buffer, len); break; + case ENUM_AND_SET_DEFAULT_CHARSET: + parse_default_charset(buffer, len); + break; + case ENUM_AND_SET_COLUMN_CHARSET: + parse_column_charset(buffer, len); + break; + case COLUMN_VISIBILITY: + parse_column_visibility(buffer, len); + break; default: throw new IllegalArgumentException("unknow type : " + type); } @@ -529,12 +545,16 @@ public TableMapLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLog */ private final void decodeFields(LogBuffer buffer, final int len) { final int limit = buffer.limit(); - buffer.limit(len + buffer.position()); for (int i = 0; i < columnCnt; i++) { ColumnInfo info = columnInfo[i]; - switch (info.type) { + int binlogType = info.type; + if (binlogType == MYSQL_TYPE_TYPED_ARRAY) { + binlogType = buffer.getUint8(); + } + + switch (binlogType) { case MYSQL_TYPE_TINY_BLOB: case MYSQL_TYPE_BLOB: case MYSQL_TYPE_MEDIUM_BLOB: @@ -542,6 +562,9 @@ private final void decodeFields(LogBuffer buffer, final int len) { case MYSQL_TYPE_DOUBLE: case MYSQL_TYPE_FLOAT: case MYSQL_TYPE_GEOMETRY: + case MYSQL_TYPE_TIME2: + case MYSQL_TYPE_DATETIME2: + case MYSQL_TYPE_TIMESTAMP2: case 
MYSQL_TYPE_JSON: /* * These types store a single byte. @@ -550,14 +573,6 @@ private final void decodeFields(LogBuffer buffer, final int len) { break; case MYSQL_TYPE_SET: case MYSQL_TYPE_ENUM: - /* - * log_event.h : MYSQL_TYPE_SET & MYSQL_TYPE_ENUM : This - * enumeration value is only used internally and cannot - * exist in a binlog. - */ - logger.warn("This enumeration value is only used internally " - + "and cannot exist in a binlog: type=" + info.type); - break; case MYSQL_TYPE_STRING: { /* * log_event.h : The first byte is always @@ -585,12 +600,6 @@ private final void decodeFields(LogBuffer buffer, final int len) { info.meta = x; break; } - case MYSQL_TYPE_TIME2: - case MYSQL_TYPE_DATETIME2: - case MYSQL_TYPE_TIMESTAMP2: { - info.meta = buffer.getUint8(); - break; - } default: info.meta = 0; break; @@ -648,6 +657,20 @@ private List parse_column_charset(LogBuffer buffer, int length) { return datas; } + + private void parse_column_visibility(LogBuffer buffer, int length) { + List data = new ArrayList<>(columnInfo.length); + for (int i = 0; i < length; i++) { + int ut = buffer.getUint8(); + for (int c = 0x80; c != 0; c >>= 1) { + data.add((ut & c) > 0); + } + } + for (int i = 0; i < columnCnt; i++) { + columnInfo[i].visibility = data.get(i); + } + } + private void parse_column_name(LogBuffer buffer, int length) { // stores column names extracted from field int limit = buffer.position() + length; diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/TransactionPayloadLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/TransactionPayloadLogEvent.java new file mode 100644 index 0000000000..148240b760 --- /dev/null +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/TransactionPayloadLogEvent.java @@ -0,0 +1,95 @@ +package com.taobao.tddl.dbsync.binlog.event; + +import com.taobao.tddl.dbsync.binlog.LogBuffer; +import com.taobao.tddl.dbsync.binlog.LogEvent; + +/** + * @author agapple 2022年5月23日 下午7:05:39 + * @version 
1.1.7 + * @since mysql 8.0.20 + */ +public class TransactionPayloadLogEvent extends LogEvent { + + public static final short COMPRESSION_TYPE_MIN_LENGTH = 1; + public static final short COMPRESSION_TYPE_MAX_LENGTH = 9; + public static final short PAYLOAD_SIZE_MIN_LENGTH = 0; + public static final short PAYLOAD_SIZE_MAX_LENGTH = 9; + public static final short UNCOMPRESSED_SIZE_MIN_LENGTH = 0; + public static final short UNCOMPRESSED_SIZE_MAX_LENGTH = 9; + public static final int MAX_DATA_LENGTH = COMPRESSION_TYPE_MAX_LENGTH + + PAYLOAD_SIZE_MAX_LENGTH + + UNCOMPRESSED_SIZE_MAX_LENGTH; + + /** Marks the end of the payload header. */ + public static final int OTW_PAYLOAD_HEADER_END_MARK = 0; + + /** The payload field */ + public static final int OTW_PAYLOAD_SIZE_FIELD = 1; + + /** The compression type field */ + public static final int OTW_PAYLOAD_COMPRESSION_TYPE_FIELD = 2; + + /** The uncompressed size field */ + public static final int OTW_PAYLOAD_UNCOMPRESSED_SIZE_FIELD = 3; + + /* ZSTD compression. */ + public final static int COMPRESS_TYPE_ZSTD = 0; + /* No compression. 
*/ + public final static int COMPRESS_TYPE_NONE = 255; + + private long m_compression_type = COMPRESS_TYPE_NONE; + private long m_payload_size; + private long m_uncompressed_size; + private byte[] m_payload; + + public TransactionPayloadLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent){ + super(header); + + final int commonHeaderLen = descriptionEvent.getCommonHeaderLen(); + final int postHeaderLen = descriptionEvent.getPostHeaderLen()[header.getType() - 1]; + + int offset = commonHeaderLen; + buffer.position(offset); + long type = 0, length = 0; + while (buffer.hasRemaining()) { + type = buffer.getPackedLong(); // type + if (type == OTW_PAYLOAD_HEADER_END_MARK) { + break; + } + + length = buffer.getPackedLong(); // length + switch ((int) type) { + case OTW_PAYLOAD_SIZE_FIELD: + m_payload_size = buffer.getPackedLong(); // value + break; + case OTW_PAYLOAD_COMPRESSION_TYPE_FIELD: + m_compression_type = buffer.getPackedLong(); // value + break; + case OTW_PAYLOAD_UNCOMPRESSED_SIZE_FIELD: + m_uncompressed_size = buffer.getPackedLong(); // value + break; + default: + buffer.forward((int) length); + break; + } + + } + + if (m_uncompressed_size == 0) { + m_uncompressed_size = m_payload_size; + } + m_payload = buffer.getData((int) m_payload_size); + } + + public boolean isCompressByZstd() { + return m_compression_type == COMPRESS_TYPE_ZSTD; + } + + public boolean isCompressByNone() { + return m_compression_type == COMPRESS_TYPE_NONE; + } + + public byte[] getPayload() { + return m_payload; + } +} diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/UpdateRowsLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/UpdateRowsLogEvent.java index b604e04d9a..75e8e08592 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/UpdateRowsLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/UpdateRowsLogEvent.java @@ -11,14 +11,19 @@ * @author Changyuan.lh * @version 1.0 
*/ -public final class UpdateRowsLogEvent extends RowsLogEvent { +public class UpdateRowsLogEvent extends RowsLogEvent { public UpdateRowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent){ - super(header, buffer, descriptionEvent, false); + super(header, buffer, descriptionEvent, false , false); } public UpdateRowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent, boolean partial){ - super(header, buffer, descriptionEvent, partial); + super(header, buffer, descriptionEvent, partial ,false); + } + + public UpdateRowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent, + boolean partial , boolean compress){ + super(header, buffer, descriptionEvent, partial , compress); } } diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/UserVarLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/UserVarLogEvent.java index d6acb94c28..19d5320e5e 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/UserVarLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/UserVarLogEvent.java @@ -2,6 +2,7 @@ import java.io.IOException; import java.io.Serializable; +import java.nio.charset.Charset; import com.taobao.tddl.dbsync.binlog.CharsetConversion; import com.taobao.tddl.dbsync.binlog.LogBuffer; @@ -100,8 +101,8 @@ public UserVarLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogE value = buffer.getDecimal(precision, scale); // bin2decimal break; case STRING_RESULT: - String charsetName = CharsetConversion.getJavaCharset(charsetNumber); - value = buffer.getFixString(valueLen, charsetName); + Charset charset = CharsetConversion.getNioCharset(charsetNumber); + value = buffer.getFixString(valueLen, charset); break; case ROW_RESULT: // this seems to be banned in MySQL altogether diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/WriteRowsLogEvent.java 
b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/WriteRowsLogEvent.java index 42ba98d64f..b308f09523 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/WriteRowsLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/WriteRowsLogEvent.java @@ -9,9 +9,14 @@ * @author Changyuan.lh * @version 1.0 */ -public final class WriteRowsLogEvent extends RowsLogEvent { +public class WriteRowsLogEvent extends RowsLogEvent { public WriteRowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent){ - super(header, buffer, descriptionEvent); + super(header, buffer, descriptionEvent, false, false); + } + + public WriteRowsLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent, + boolean compress){ + super(header, buffer, descriptionEvent, false, compress); } } diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/AnnotateRowsEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/AnnotateRowsEvent.java index cbb8387160..199920281b 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/AnnotateRowsEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/AnnotateRowsEvent.java @@ -5,6 +5,8 @@ import com.taobao.tddl.dbsync.binlog.event.IgnorableLogEvent; import com.taobao.tddl.dbsync.binlog.event.LogHeader; +import java.nio.charset.StandardCharsets; + /** * mariadb的ANNOTATE_ROWS_EVENT类型 * @@ -23,7 +25,7 @@ public AnnotateRowsEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLo int offset = commonHeaderLen + postHeaderLen; int len = buffer.limit() - offset; - rowsQuery = buffer.getFullString(offset, len, LogBuffer.ISO_8859_1); + rowsQuery = buffer.getFullString(offset, len, StandardCharsets.ISO_8859_1); } public String getRowsQuery() { diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/BinlogCheckPointLogEvent.java 
b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/BinlogCheckPointLogEvent.java index aaa2a286d1..71bf6b75b5 100644 --- a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/BinlogCheckPointLogEvent.java +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/BinlogCheckPointLogEvent.java @@ -13,9 +13,20 @@ */ public class BinlogCheckPointLogEvent extends IgnorableLogEvent { + private final String filename; + public BinlogCheckPointLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent){ super(header, buffer, descriptionEvent); - // do nothing , just mariadb binlog checkpoint + // mariadb binlog checkpoint + final int headerSize = descriptionEvent.getCommonHeaderLen(); + final int postHeaderLen = descriptionEvent.getPostHeaderLen()[getHeader().getType() - 1]; + + buffer.position(headerSize); + long binlogFileLen = buffer.getUint32(); + filename = buffer.getFixString((int) binlogFileLen); } + public String getFilename() { + return filename; + } } diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/DeleteRowsCompressLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/DeleteRowsCompressLogEvent.java new file mode 100644 index 0000000000..b8cad450d3 --- /dev/null +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/DeleteRowsCompressLogEvent.java @@ -0,0 +1 @@ +package com.taobao.tddl.dbsync.binlog.event.mariadb; import com.taobao.tddl.dbsync.binlog.LogBuffer; import com.taobao.tddl.dbsync.binlog.event.DeleteRowsLogEvent; import com.taobao.tddl.dbsync.binlog.event.FormatDescriptionLogEvent; import com.taobao.tddl.dbsync.binlog.event.LogHeader; /** * mariadb compress rows event * * @author jianghang * @since 1.1.7 */ public class DeleteRowsCompressLogEvent extends DeleteRowsLogEvent { public DeleteRowsCompressLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent){ super(header, buffer, 
descriptionEvent, true); } } \ No newline at end of file diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/QueryCompressedLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/QueryCompressedLogEvent.java new file mode 100644 index 0000000000..1c12cde750 --- /dev/null +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/QueryCompressedLogEvent.java @@ -0,0 +1 @@ +package com.taobao.tddl.dbsync.binlog.event.mariadb; import java.io.IOException; import com.taobao.tddl.dbsync.binlog.LogBuffer; import com.taobao.tddl.dbsync.binlog.event.FormatDescriptionLogEvent; import com.taobao.tddl.dbsync.binlog.event.LogHeader; import com.taobao.tddl.dbsync.binlog.event.QueryLogEvent; /** * mariadb compress query event * * @author jianghang * @since 1.1.7 */ public class QueryCompressedLogEvent extends QueryLogEvent { public QueryCompressedLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent) throws IOException{ super(header, buffer, descriptionEvent, true); } } \ No newline at end of file diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/UpdateRowsCompressLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/UpdateRowsCompressLogEvent.java new file mode 100644 index 0000000000..5af9464c94 --- /dev/null +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/UpdateRowsCompressLogEvent.java @@ -0,0 +1 @@ +package com.taobao.tddl.dbsync.binlog.event.mariadb; import com.taobao.tddl.dbsync.binlog.LogBuffer; import com.taobao.tddl.dbsync.binlog.event.FormatDescriptionLogEvent; import com.taobao.tddl.dbsync.binlog.event.LogHeader; import com.taobao.tddl.dbsync.binlog.event.UpdateRowsLogEvent; /** * mariadb compress rows event * * @author jianghang * @since 1.1.7 */ public class UpdateRowsCompressLogEvent extends UpdateRowsLogEvent { public UpdateRowsCompressLogEvent(LogHeader header, LogBuffer buffer, 
FormatDescriptionLogEvent descriptionEvent){ super(header, buffer, descriptionEvent, false, true); } } \ No newline at end of file diff --git a/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/WriteRowsCompressLogEvent.java b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/WriteRowsCompressLogEvent.java new file mode 100644 index 0000000000..b6769aa193 --- /dev/null +++ b/dbsync/src/main/java/com/taobao/tddl/dbsync/binlog/event/mariadb/WriteRowsCompressLogEvent.java @@ -0,0 +1 @@ +package com.taobao.tddl.dbsync.binlog.event.mariadb; import com.taobao.tddl.dbsync.binlog.LogBuffer; import com.taobao.tddl.dbsync.binlog.event.FormatDescriptionLogEvent; import com.taobao.tddl.dbsync.binlog.event.LogHeader; import com.taobao.tddl.dbsync.binlog.event.WriteRowsLogEvent; /** * mariadb compress rows event * * @author jianghang * @since 1.1.7 */ public class WriteRowsCompressLogEvent extends WriteRowsLogEvent { public WriteRowsCompressLogEvent(LogHeader header, LogBuffer buffer, FormatDescriptionLogEvent descriptionEvent){ super(header, buffer, descriptionEvent, true); } } \ No newline at end of file diff --git a/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/BaseLogFetcherTest.java b/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/BaseLogFetcherTest.java index 27ecf3db49..4980a2924d 100644 --- a/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/BaseLogFetcherTest.java +++ b/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/BaseLogFetcherTest.java @@ -52,7 +52,7 @@ protected void parseRowsEvent(RowsLogEvent event) { event.getHeader().getLogPos() - event.getHeader().getEventLen(), event.getTable().getDbName(), event.getTable().getTableName())); - RowsLogBuffer buffer = event.getRowsBuf(charset.name()); + RowsLogBuffer buffer = event.getRowsBuf(charset); BitSet columns = event.getColumns(); BitSet changeColumns = event.getChangeColumns(); while (buffer.nextOneRow(columns)) { diff --git 
a/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/DirectLogFetcherTest.java b/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/DirectLogFetcherTest.java index 2194c63f2c..861a381e5a 100644 --- a/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/DirectLogFetcherTest.java +++ b/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/DirectLogFetcherTest.java @@ -4,19 +4,16 @@ import java.sql.Connection; import java.sql.DriverManager; import java.sql.Statement; +import java.util.List; +import com.taobao.tddl.dbsync.binlog.event.mariadb.BinlogCheckPointLogEvent; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; -import com.taobao.tddl.dbsync.binlog.event.DeleteRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.QueryLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RotateLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RowsQueryLogEvent; -import com.taobao.tddl.dbsync.binlog.event.UpdateRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.WriteRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.XidLogEvent; +import com.taobao.tddl.dbsync.binlog.event.*; import com.taobao.tddl.dbsync.binlog.event.mariadb.AnnotateRowsEvent; + @Ignore public class DirectLogFetcherTest extends BaseLogFetcherTest { @@ -25,52 +22,20 @@ public void testSimple() { DirectLogFetcher fecther = new DirectLogFetcher(); try { Class.forName("com.mysql.jdbc.Driver"); - Connection connection = DriverManager.getConnection("jdbc:mysql://127.0.0.1:3306", "root", "hello"); + Connection connection = DriverManager.getConnection("jdbc:mysql://127.0.0.1:3306", "root", "123456"); Statement statement = connection.createStatement(); statement.execute("SET @master_binlog_checksum='@@global.binlog_checksum'"); statement.execute("SET @mariadb_slave_capability='" + LogEvent.MARIA_SLAVE_CAPABILITY_MINE + "'"); - fecther.open(connection, "mysql-bin.000007", 89797036L, 2); + fecther.open(connection, "mysql-bin.000002", 4L, 1); LogDecoder decoder = new 
LogDecoder(LogEvent.UNKNOWN_EVENT, LogEvent.ENUM_END_EVENT); LogContext context = new LogContext(); while (fecther.fetch()) { LogEvent event = decoder.decode(fecther, context); - int eventType = event.getHeader().getType(); - switch (eventType) { - case LogEvent.ROTATE_EVENT: - binlogFileName = ((RotateLogEvent) event).getFilename(); - break; - case LogEvent.WRITE_ROWS_EVENT_V1: - case LogEvent.WRITE_ROWS_EVENT: - parseRowsEvent((WriteRowsLogEvent) event); - break; - case LogEvent.UPDATE_ROWS_EVENT_V1: - case LogEvent.PARTIAL_UPDATE_ROWS_EVENT: - case LogEvent.UPDATE_ROWS_EVENT: - parseRowsEvent((UpdateRowsLogEvent) event); - break; - case LogEvent.DELETE_ROWS_EVENT_V1: - case LogEvent.DELETE_ROWS_EVENT: - parseRowsEvent((DeleteRowsLogEvent) event); - break; - case LogEvent.QUERY_EVENT: - parseQueryEvent((QueryLogEvent) event); - break; - case LogEvent.ROWS_QUERY_LOG_EVENT: - parseRowsQueryEvent((RowsQueryLogEvent) event); - break; - case LogEvent.ANNOTATE_ROWS_EVENT: - parseAnnotateRowsEvent((AnnotateRowsEvent) event); - break; - case LogEvent.XID_EVENT: - parseXidEvent((XidLogEvent) event); - break; - default: - break; - } + processEvent(event, decoder, context); } - } catch (Exception e) { + } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } finally { @@ -82,4 +47,49 @@ public void testSimple() { } } + + public void processEvent(LogEvent event, LogDecoder decoder, LogContext context) throws Throwable { + int eventType = event.getHeader().getType(); + switch (eventType) { + case LogEvent.ROTATE_EVENT: + binlogFileName = ((RotateLogEvent) event).getFilename(); + break; + case LogEvent.BINLOG_CHECKPOINT_EVENT: + binlogFileName = ((BinlogCheckPointLogEvent) event).getFilename(); + break; + case LogEvent.WRITE_ROWS_EVENT_V1: + case LogEvent.WRITE_ROWS_EVENT: + parseRowsEvent((WriteRowsLogEvent) event); + break; + case LogEvent.UPDATE_ROWS_EVENT_V1: + case LogEvent.PARTIAL_UPDATE_ROWS_EVENT: + case LogEvent.UPDATE_ROWS_EVENT: + 
parseRowsEvent((UpdateRowsLogEvent) event); + break; + case LogEvent.DELETE_ROWS_EVENT_V1: + case LogEvent.DELETE_ROWS_EVENT: + parseRowsEvent((DeleteRowsLogEvent) event); + break; + case LogEvent.QUERY_EVENT: + parseQueryEvent((QueryLogEvent) event); + break; + case LogEvent.ROWS_QUERY_LOG_EVENT: + parseRowsQueryEvent((RowsQueryLogEvent) event); + break; + case LogEvent.ANNOTATE_ROWS_EVENT: + parseAnnotateRowsEvent((AnnotateRowsEvent) event); + break; + case LogEvent.XID_EVENT: + parseXidEvent((XidLogEvent) event); + break; + case LogEvent.TRANSACTION_PAYLOAD_EVENT: + List events = decoder.processIterateDecode(event, context); + for (LogEvent deEvent : events) { + processEvent(deEvent, decoder, context); + } + break; + default: + break; + } + } } diff --git a/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/FileLogFetcherTest.java b/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/FileLogFetcherTest.java index c4012a8345..22dd95fff4 100644 --- a/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/FileLogFetcherTest.java +++ b/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/FileLogFetcherTest.java @@ -3,6 +3,7 @@ import java.io.File; import java.io.IOException; import java.net.URL; +import java.util.List; import org.junit.Assert; import org.junit.Before; @@ -43,43 +44,9 @@ public void testSimple() { while (fetcher.fetch()) { LogEvent event = null; event = decoder.decode(fetcher, context); - if (event != null) { - int eventType = event.getHeader().getType(); - switch (eventType) { - case LogEvent.ROTATE_EVENT: - binlogFileName = ((RotateLogEvent) event).getFilename(); - break; - case LogEvent.WRITE_ROWS_EVENT_V1: - case LogEvent.WRITE_ROWS_EVENT: - parseRowsEvent((WriteRowsLogEvent) event); - break; - case LogEvent.UPDATE_ROWS_EVENT_V1: - case LogEvent.PARTIAL_UPDATE_ROWS_EVENT: - case LogEvent.UPDATE_ROWS_EVENT: - parseRowsEvent((UpdateRowsLogEvent) event); - break; - case LogEvent.DELETE_ROWS_EVENT_V1: - case LogEvent.DELETE_ROWS_EVENT: - 
parseRowsEvent((DeleteRowsLogEvent) event); - break; - case LogEvent.QUERY_EVENT: - parseQueryEvent((QueryLogEvent) event); - break; - case LogEvent.ROWS_QUERY_LOG_EVENT: - parseRowsQueryEvent((RowsQueryLogEvent) event); - break; - case LogEvent.ANNOTATE_ROWS_EVENT: - parseAnnotateRowsEvent((AnnotateRowsEvent) event); - break; - case LogEvent.XID_EVENT: - parseXidEvent((XidLogEvent) event); - break; - default: - break; - } - } + processEvent(event, decoder, context); } - } catch (Exception e) { + } catch (Throwable e) { Assert.fail(e.getMessage()); } finally { try { @@ -89,4 +56,46 @@ public void testSimple() { } } } + + public void processEvent(LogEvent event, LogDecoder decoder, LogContext context) throws Throwable { + int eventType = event.getHeader().getType(); + switch (eventType) { + case LogEvent.ROTATE_EVENT: + binlogFileName = ((RotateLogEvent) event).getFilename(); + break; + case LogEvent.WRITE_ROWS_EVENT_V1: + case LogEvent.WRITE_ROWS_EVENT: + parseRowsEvent((WriteRowsLogEvent) event); + break; + case LogEvent.UPDATE_ROWS_EVENT_V1: + case LogEvent.PARTIAL_UPDATE_ROWS_EVENT: + case LogEvent.UPDATE_ROWS_EVENT: + parseRowsEvent((UpdateRowsLogEvent) event); + break; + case LogEvent.DELETE_ROWS_EVENT_V1: + case LogEvent.DELETE_ROWS_EVENT: + parseRowsEvent((DeleteRowsLogEvent) event); + break; + case LogEvent.QUERY_EVENT: + parseQueryEvent((QueryLogEvent) event); + break; + case LogEvent.ROWS_QUERY_LOG_EVENT: + parseRowsQueryEvent((RowsQueryLogEvent) event); + break; + case LogEvent.ANNOTATE_ROWS_EVENT: + parseAnnotateRowsEvent((AnnotateRowsEvent) event); + break; + case LogEvent.XID_EVENT: + parseXidEvent((XidLogEvent) event); + break; + case LogEvent.TRANSACTION_PAYLOAD_EVENT: + List events = decoder.processIterateDecode(event, context); + for (LogEvent deEvent : events) { + processEvent(deEvent, decoder, context); + } + break; + default: + break; + } + } } diff --git a/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/JsonDiffConversionTest.java 
b/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/JsonDiffConversionTest.java index 8401983bff..b2c1f514d5 100644 --- a/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/JsonDiffConversionTest.java +++ b/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/JsonDiffConversionTest.java @@ -37,7 +37,7 @@ public void print_json_diffInputNotNullPositiveNotNullZeroNotNullOutputIllegalAr final long len = 71L; final String columnName = "foo"; final int columnIndex = 0; - final String charsetName = "foo"; + final String charsetName = "gbk"; try { // Act @@ -78,7 +78,7 @@ public void print_json_diffInputNotNullZeroNotNullZeroNotNullOutputIllegalArgume final long len = 0L; final String columnName = "foo"; final int columnIndex = 0; - final String charsetName = "foo"; + final String charsetName = "gbk"; try { // Act @@ -119,7 +119,7 @@ public void print_json_diffInputNotNullZeroNotNullZeroNotNullOutputIllegalArgume final long len = 0L; final String columnName = "foo"; final int columnIndex = 0; - final String charsetName = "foo"; + final String charsetName = "gbk"; try { // Act @@ -160,7 +160,7 @@ public void print_json_diffInputNotNullZeroNotNullZeroNotNullOutputIllegalArgume final long len = 0L; final String columnName = "foo"; final int columnIndex = 0; - final String charsetName = "foo"; + final String charsetName = "gbk"; try { // Act @@ -191,7 +191,7 @@ public void print_json_diffInputNotNullZeroNotNullZeroNotNullOutputNotNull() { final long len = 0L; final String columnName = ","; final int columnIndex = 0; - final String charsetName = "1a 2b 3c"; + final String charsetName = "gbk"; // Act final StringBuilder actual = JsonDiffConversion.print_json_diff(buffer, diff --git a/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/LogEventTest.java b/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/LogEventTest.java index bd38816f18..ac65f2d1e3 100644 --- a/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/LogEventTest.java +++ 
b/dbsync/src/test/java/com/taobao/tddl/dbsync/binlog/LogEventTest.java @@ -63,13 +63,13 @@ public void getTypeNameInputPositiveOutputNotNull3() { public void getTypeNameInputPositiveOutputNotNull4() { // Arrange - final int type = 36; + final int type = 80; // Act final String actual = LogEvent.getTypeName(type); // Assert result - Assert.assertEquals("Unknown", actual); + Assert.assertTrue(actual.startsWith("Unknown")); } // Test written by Diffblue Cover. diff --git a/deployer/pom.xml b/deployer/pom.xml index adb79c00be..a9da248e35 100644 --- a/deployer/pom.xml +++ b/deployer/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter diff --git a/deployer/src/main/bin/startup.sh b/deployer/src/main/bin/startup.sh index 7624349801..fbde043127 100644 --- a/deployer/src/main/bin/startup.sh +++ b/deployer/src/main/bin/startup.sh @@ -79,11 +79,27 @@ in exit;; esac +JavaVersion=`$JAVA -version 2>&1 |awk 'NR==1{ gsub(/"/,""); print $3 }' | awk -F '.' 
'{print $1}'` str=`file -L $JAVA | grep 64-bit` +JAVA_OPTS="$JAVA_OPTS -Xss1m -XX:+AggressiveOpts -XX:-UseBiasedLocking -XX:-OmitStackTraceInFastThrow -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=$base/logs" + +if [ $JavaVersion -ge 11 ] ; then + #JAVA_OPTS="$JAVA_OPTS -Xlog:gc*:$base_log/gc.log:time " + JAVA_OPTS="$JAVA_OPTS" +else + #JAVA_OPTS="$JAVA_OPTS -Xloggc:$base/logs/canal/gc.log -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCApplicationStoppedTime" + JAVA_OPTS="$JAVA_OPTS -XX:+UseFastAccessorMethods -XX:+PrintAdaptiveSizePolicy -XX:+PrintTenuringDistribution" +fi + if [ -n "$str" ]; then - JAVA_OPTS="-server -Xms2048m -Xmx3072m -Xmn1024m -XX:SurvivorRatio=2 -XX:PermSize=96m -XX:MaxPermSize=256m -Xss256k -XX:-UseAdaptiveSizePolicy -XX:MaxTenuringThreshold=15 -XX:+DisableExplicitGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+UseCMSCompactAtFullCollection -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:+HeapDumpOnOutOfMemoryError" + if [ $JavaVersion -ge 11 ] ; then + # For G1 + JAVA_OPTS="-server -Xms2g -Xmx3g -XX:+UseG1GC -XX:MaxGCPauseMillis=250 -XX:+UseGCOverheadLimit -XX:+ExplicitGCInvokesConcurrent $JAVA_OPTS" + else + JAVA_OPTS="-server -Xms2g -Xmx3g -Xmn1g -XX:SurvivorRatio=2 -XX:PermSize=96m -XX:MaxPermSize=256m -XX:MaxTenuringThreshold=15 -XX:+DisableExplicitGC $JAVA_OPTS" + fi else - JAVA_OPTS="-server -Xms1024m -Xmx1024m -XX:NewSize=256m -XX:MaxNewSize=256m -XX:MaxPermSize=128m " + JAVA_OPTS="-server -Xms1024m -Xmx1024m -XX:NewSize=256m -XX:MaxNewSize=256m -XX:MaxPermSize=128m $JAVA_OPTS" fi JAVA_OPTS=" $JAVA_OPTS -Djava.awt.headless=true -Djava.net.preferIPv4Stack=true -Dfile.encoding=UTF-8" diff --git a/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalConstants.java b/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalConstants.java index ad41ef31ae..55e34eda6a 100644 --- a/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalConstants.java +++ 
b/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalConstants.java @@ -30,6 +30,7 @@ public class CanalConstants { public static final String CANAL_WITHOUT_NETTY = ROOT + "." + "withoutNetty"; public static final String CANAL_DESTINATIONS = ROOT + "." + "destinations"; + public static final String CANAL_DESTINATIONS_EXPR = ROOT + "." + "destinations.expr"; public static final String CANAL_AUTO_SCAN = ROOT + "." + "auto.scan"; public static final String CANAL_AUTO_SCAN_INTERVAL = ROOT + "." + "auto.scan.interval"; public static final String CANAL_CONF_DIR = ROOT + "." + "conf.dir"; diff --git a/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalController.java b/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalController.java index 53de822c9e..11a915471c 100644 --- a/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalController.java +++ b/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalController.java @@ -1,18 +1,5 @@ package com.alibaba.otter.canal.deployer; -import java.util.Map; -import java.util.Properties; - -import org.I0Itec.zkclient.IZkStateListener; -import org.I0Itec.zkclient.exception.ZkNoNodeException; -import org.I0Itec.zkclient.exception.ZkNodeExistsException; -import org.apache.commons.lang.BooleanUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.zookeeper.Watcher.Event.KeeperState; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.slf4j.MDC; - import com.alibaba.otter.canal.common.utils.AddressUtils; import com.alibaba.otter.canal.common.zookeeper.ZkClientx; import com.alibaba.otter.canal.common.zookeeper.ZookeeperPathUtils; @@ -36,6 +23,26 @@ import com.google.common.base.Function; import com.google.common.collect.MapMaker; import com.google.common.collect.MigrateMap; +import org.I0Itec.zkclient.IZkStateListener; +import org.I0Itec.zkclient.exception.ZkNoNodeException; +import org.I0Itec.zkclient.exception.ZkNodeExistsException; +import 
org.apache.commons.lang.BooleanUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.zookeeper.Watcher.Event.KeeperState; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.slf4j.MDC; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import static com.alibaba.otter.canal.deployer.CanalConstants.CANAL_DESTINATIONS; +import static com.alibaba.otter.canal.deployer.CanalConstants.CANAL_DESTINATIONS_EXPR; /** * canal调度控制器 @@ -58,7 +65,7 @@ public class CanalController { private boolean autoScan = true; private InstanceAction defaultAction; private Map instanceConfigMonitors; - private CanalServerWithEmbedded embededCanalServer; + private CanalServerWithEmbedded embeddedCanalServer; private CanalServerWithNetty canalServer; private CanalInstanceGenerator instanceGenerator; @@ -102,15 +109,15 @@ public CanalController(final Properties properties){ registerIp = getProperty(properties, CanalConstants.CANAL_REGISTER_IP); port = Integer.valueOf(getProperty(properties, CanalConstants.CANAL_PORT, "11111")); adminPort = Integer.valueOf(getProperty(properties, CanalConstants.CANAL_ADMIN_PORT, "11110")); - embededCanalServer = CanalServerWithEmbedded.instance(); - embededCanalServer.setCanalInstanceGenerator(instanceGenerator);// 设置自定义的instanceGenerator + embeddedCanalServer = CanalServerWithEmbedded.instance(); + embeddedCanalServer.setCanalInstanceGenerator(instanceGenerator);// 设置自定义的instanceGenerator int metricsPort = Integer.valueOf(getProperty(properties, CanalConstants.CANAL_METRICS_PULL_PORT, "11112")); - embededCanalServer.setMetricsPort(metricsPort); + embeddedCanalServer.setMetricsPort(metricsPort); this.adminUser = getProperty(properties, CanalConstants.CANAL_ADMIN_USER); this.adminPasswd = getProperty(properties, CanalConstants.CANAL_ADMIN_PASSWD); - 
embededCanalServer.setUser(getProperty(properties, CanalConstants.CANAL_USER)); - embededCanalServer.setPasswd(getProperty(properties, CanalConstants.CANAL_PASSWD)); + embeddedCanalServer.setUser(getProperty(properties, CanalConstants.CANAL_USER)); + embeddedCanalServer.setPasswd(getProperty(properties, CanalConstants.CANAL_PASSWD)); String canalWithoutNetty = getProperty(properties, CanalConstants.CANAL_WITHOUT_NETTY); if (canalWithoutNetty == null || "false".equals(canalWithoutNetty)) { @@ -149,7 +156,7 @@ public CanalController(final Properties properties){ public void processActiveEnter() { try { MDC.put(CanalConstants.MDC_DESTINATION, String.valueOf(destination)); - embededCanalServer.start(destination); + embeddedCanalServer.start(destination); if (canalMQStarter != null) { canalMQStarter.startDestination(destination); } @@ -164,7 +171,7 @@ public void processActiveExit() { if (canalMQStarter != null) { canalMQStarter.stopDestination(destination); } - embededCanalServer.stop(destination); + embeddedCanalServer.stop(destination); } finally { MDC.remove(CanalConstants.MDC_DESTINATION); } @@ -232,7 +239,7 @@ public void start(String destination) { instanceConfigs.put(destination, config); } - if (!embededCanalServer.isStart(destination)) { + if (!embeddedCanalServer.isStart(destination)) { // HA机制启动 ServerRunningMonitor runningMonitor = ServerRunningMonitors.getRunningMonitor(destination); if (!config.getLazy() && !runningMonitor.isStart()) { @@ -247,7 +254,7 @@ public void stop(String destination) { // 此处的stop,代表强制退出,非HA机制,所以需要退出HA的monitor和配置信息 InstanceConfig config = instanceConfigs.remove(destination); if (config != null) { - embededCanalServer.stop(destination); + embeddedCanalServer.stop(destination); ServerRunningMonitor runningMonitor = ServerRunningMonitors.getRunningMonitor(destination); if (runningMonitor.isStart()) { runningMonitor.stop(); @@ -377,7 +384,7 @@ private InstanceConfig initGlobalConfig(Properties properties) { 
instanceGenerator.setSpringXml(config.getSpringXml()); return instanceGenerator.generate(destination); } else { - throw new UnsupportedOperationException("unknow mode :" + config.getMode()); + throw new UnsupportedOperationException("unknown mode :" + config.getMode()); } }; @@ -390,7 +397,7 @@ private PlainCanalConfigClient getManagerClient(String managerAddress) { } private void initInstanceConfig(Properties properties) { - String destinationStr = getProperty(properties, CanalConstants.CANAL_DESTINATIONS); + String destinationStr = getDestinations(properties); String[] destinations = StringUtils.split(destinationStr, CanalConstants.CANAL_DESTINATION_SPLIT); for (String destination : destinations) { @@ -461,6 +468,40 @@ public static String getProperty(Properties properties, String key) { return StringUtils.trim(value); } + public static String getDestinations(Properties properties) { + String expr = getProperty(properties, CANAL_DESTINATIONS_EXPR); + if (StringUtils.isNotBlank(expr)) { + return parseExpr(expr); + } else { + return getProperty(properties, CANAL_DESTINATIONS); + } + } + + private static String parseExpr(String expr) { + String prefix = StringUtils.substringBefore(expr, "{"); + String range = StringUtils.substringAfter(expr, "{"); + range = StringUtils.substringBefore(range, "}"); + + String regex = "(\\d+)-(\\d+)"; + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(range); + if (matcher.find()) { + String head = matcher.group(1); + String tail = matcher.group(2); + int start = Integer.parseInt(head); + int end = Integer.parseInt(tail); + + List list = new ArrayList<>(); + for (int i = start; i <= end; i++) { + String d = prefix + i; + list.add(d); + } + return list.stream().map(Object::toString).collect(Collectors.joining(",")); + } else { + throw new CanalServerException("invalid destinations expr " + expr); + } + } + public void start() throws Throwable { logger.info("## start the canal server[{}({}):{}]", ip, 
registerIp, port); // 创建整个canal的工作节点 @@ -483,14 +524,14 @@ public void handleSessionEstablishmentError(Throwable error) throws Exception { } }); } - // 优先启动embeded服务 - embededCanalServer.start(); + // 优先启动embedded服务 + embeddedCanalServer.start(); // 尝试启动一下非lazy状态的通道 for (Map.Entry entry : instanceConfigs.entrySet()) { final String destination = entry.getKey(); InstanceConfig config = entry.getValue(); // 创建destination的工作节点 - if (!embededCanalServer.isStart(destination)) { + if (!embeddedCanalServer.isStart(destination)) { // HA机制启动 ServerRunningMonitor runningMonitor = ServerRunningMonitors.getRunningMonitor(destination); if (!config.getLazy() && !runningMonitor.isStart()) { @@ -558,6 +599,11 @@ public void stop() throws Throwable { } ZkClientx.clearClients(); + + // 需要释放 CanalServerWithEmbedded 否则主线程退出后,进程无法自动完整退出... + if (embeddedCanalServer != null) { + embeddedCanalServer.stop(); + } } private void initCid(String path) { diff --git a/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalLauncher.java b/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalLauncher.java index 2bc78994c3..11f4f5c7d3 100644 --- a/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalLauncher.java +++ b/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalLauncher.java @@ -36,6 +36,9 @@ public static void main(String[] args) { logger.info("## set default uncaught exception handler"); setGlobalUncaughtExceptionHandler(); + // 支持rocketmq client 配置日志路径 + System.setProperty("rocketmq.client.logUseSlf4j","true"); + logger.info("## load canal configurations"); String conf = System.getProperty("canal.conf", "classpath:canal.properties"); Properties properties = new Properties(); @@ -56,6 +59,10 @@ public static void main(String[] args) { CanalConstants.CANAL_ADMIN_AUTO_REGISTER)); String autoCluster = CanalController.getProperty(properties, CanalConstants.CANAL_ADMIN_AUTO_CLUSTER); String name = CanalController.getProperty(properties, 
CanalConstants.CANAL_ADMIN_REGISTER_NAME); + if (StringUtils.isEmpty(name)) { + name = AddressUtils.getHostName(); + } + String registerIp = CanalController.getProperty(properties, CanalConstants.CANAL_REGISTER_IP); if (StringUtils.isEmpty(registerIp)) { registerIp = AddressUtils.getHostIp(); diff --git a/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalStarter.java b/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalStarter.java index 386cc85f3c..4f56234791 100644 --- a/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalStarter.java +++ b/deployer/src/main/java/com/alibaba/otter/canal/deployer/CanalStarter.java @@ -1,5 +1,6 @@ package com.alibaba.otter.canal.deployer; +import com.alibaba.otter.canal.connector.core.spi.ProxyCanalMQProducer; import java.util.Properties; import com.alibaba.otter.canal.connector.core.config.MQProperties; @@ -21,21 +22,21 @@ */ public class CanalStarter { - private static final Logger logger = LoggerFactory.getLogger(CanalStarter.class); + private static final Logger logger = LoggerFactory.getLogger(CanalStarter.class); - private static final String CONNECTOR_SPI_DIR = "/plugin"; + private static final String CONNECTOR_SPI_DIR = "/plugin"; private static final String CONNECTOR_STANDBY_SPI_DIR = "/canal/plugin"; - private CanalController controller = null; - private CanalMQProducer canalMQProducer = null; - private Thread shutdownThread = null; - private CanalMQStarter canalMQStarter = null; + private CanalController controller = null; + private CanalMQProducer canalMQProducer = null; + private Thread shutdownThread = null; + private CanalMQStarter canalMQStarter = null; private volatile Properties properties; - private volatile boolean running = false; + private volatile boolean running = false; private CanalAdminWithNetty canalAdmin; - public CanalStarter(Properties properties){ + public CanalStarter(Properties properties) { this.properties = properties; } @@ -67,10 +68,8 @@ public synchronized void 
start() throws Throwable { canalMQProducer = loader .getExtension(serverMode.toLowerCase(), CONNECTOR_SPI_DIR, CONNECTOR_STANDBY_SPI_DIR); if (canalMQProducer != null) { - ClassLoader cl = Thread.currentThread().getContextClassLoader(); - Thread.currentThread().setContextClassLoader(canalMQProducer.getClass().getClassLoader()); + canalMQProducer = new ProxyCanalMQProducer(canalMQProducer); canalMQProducer.init(properties); - Thread.currentThread().setContextClassLoader(cl); } } @@ -103,7 +102,7 @@ public synchronized void start() throws Throwable { if (canalMQProducer != null) { canalMQStarter = new CanalMQStarter(canalMQProducer); - String destinations = CanalController.getProperty(properties, CanalConstants.CANAL_DESTINATIONS); + String destinations = CanalController.getDestinations(properties); canalMQStarter.start(destinations); controller.setCanalMQStarter(canalMQStarter); } diff --git a/deployer/src/main/resources/canal.properties b/deployer/src/main/resources/canal.properties index 82992bcc2a..e6bdc4cc7d 100644 --- a/deployer/src/main/resources/canal.properties +++ b/deployer/src/main/resources/canal.properties @@ -148,8 +148,13 @@ kafka.max.in.flight.requests.per.connection = 1 kafka.retries = 0 kafka.kerberos.enable = false -kafka.kerberos.krb5.file = "../conf/kerberos/krb5.conf" -kafka.kerberos.jaas.file = "../conf/kerberos/jaas.conf" +kafka.kerberos.krb5.file = ../conf/kerberos/krb5.conf +kafka.kerberos.jaas.file = ../conf/kerberos/jaas.conf + +# sasl demo +# kafka.sasl.jaas.config = org.apache.kafka.common.security.scram.ScramLoginModule required \\n username=\"alice\" \\npassword="alice-secret\"; +# kafka.sasl.mechanism = SCRAM-SHA-512 +# kafka.security.protocol = SASL_PLAINTEXT ################################################## ######### RocketMQ ############# @@ -179,4 +184,4 @@ rabbitmq.deliveryMode = ################################################## pulsarmq.serverUrl = pulsarmq.roleToken = -pulsarmq.topicTenantPrefix = \ No newline at end of file 
+pulsarmq.topicTenantPrefix = diff --git a/deployer/src/main/resources/example/instance.properties b/deployer/src/main/resources/example/instance.properties index c41affe0ab..68f4a3a841 100644 --- a/deployer/src/main/resources/example/instance.properties +++ b/deployer/src/main/resources/example/instance.properties @@ -49,10 +49,14 @@ canal.instance.filter.black.regex=mysql\\.slave_.* # mq config canal.mq.topic=example # dynamic topic route by schema or table regex -#canal.mq.dynamicTopic=mytest1.user,mytest2\\..*,.*\\..* +#canal.mq.dynamicTopic=mytest1.user,topic2:mytest2\\..*,.*\\..* canal.mq.partition=0 # hash partition config +#canal.mq.enableDynamicQueuePartition=false #canal.mq.partitionsNum=3 -#canal.mq.partitionHash=test.table:id^name,.*\\..* #canal.mq.dynamicTopicPartitionNum=test.*:4,mycanal:6 +#canal.mq.partitionHash=test.table:id^name,.*\\..* +# +# multi stream for polardbx +canal.instance.multi.stream.on=false ################################################# diff --git a/deployer/src/main/resources/spring/default-instance.xml b/deployer/src/main/resources/spring/default-instance.xml index 9907ae80ba..6fe11744d9 100644 --- a/deployer/src/main/resources/spring/default-instance.xml +++ b/deployer/src/main/resources/spring/default-instance.xml @@ -185,6 +185,9 @@ + + + @@ -197,6 +200,7 @@ + diff --git a/deployer/src/main/resources/spring/file-instance.xml b/deployer/src/main/resources/spring/file-instance.xml index 800f981284..738d19c91c 100644 --- a/deployer/src/main/resources/spring/file-instance.xml +++ b/deployer/src/main/resources/spring/file-instance.xml @@ -171,6 +171,9 @@ + + + @@ -183,6 +186,7 @@ + diff --git a/deployer/src/main/resources/spring/group-instance.xml b/deployer/src/main/resources/spring/group-instance.xml index b0b887f9fd..8b4fc427f6 100644 --- a/deployer/src/main/resources/spring/group-instance.xml +++ b/deployer/src/main/resources/spring/group-instance.xml @@ -170,6 +170,7 @@ + @@ -278,6 +279,7 @@ + diff --git 
a/deployer/src/main/resources/spring/memory-instance.xml b/deployer/src/main/resources/spring/memory-instance.xml index a7dc634c17..5c5c6d377d 100644 --- a/deployer/src/main/resources/spring/memory-instance.xml +++ b/deployer/src/main/resources/spring/memory-instance.xml @@ -159,6 +159,9 @@ + + + @@ -171,6 +174,7 @@ + diff --git a/docker/Dockerfile b/docker/Dockerfile index dd82544b32..c497a72d6c 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,6 +1,7 @@ -FROM canal/osbase:v2 +FROM canal/osbase:v3-amd64 +#FROM canal/osbase:v3-arm64 -MAINTAINER agapple (jianghang115@gmail.com) +LABEL agapple (jianghang115@gmail.com) # install canal COPY image/ /tmp/docker/ @@ -19,7 +20,7 @@ RUN \ /bin/rm -f /home/admin/canal.deployer-*.tar.gz && \ tar zxvf /tmp/node_exporter.tar.gz -C /home/admin && \ - ln -s /home/admin/node_exporter-0.18.1.linux-arm64 /home/admin/node_exporter && \ + ln -s /home/admin/node_exporter-1.6.1* /home/admin/node_exporter && \ mkdir -p home/admin/canal-server/logs && \ chmod +x /home/admin/*.sh && \ @@ -34,4 +35,4 @@ EXPOSE 11110 11111 11112 9100 WORKDIR /home/admin ENTRYPOINT [ "/alidata/bin/main.sh" ] -CMD [ "/home/admin/app.sh" ] \ No newline at end of file +CMD [ "/home/admin/app.sh" ] diff --git a/docker/Dockerfile_admin b/docker/Dockerfile_admin index aee598e937..abc7e7427f 100644 --- a/docker/Dockerfile_admin +++ b/docker/Dockerfile_admin @@ -1,4 +1,5 @@ -FROM canal/osadmin:v1 +FROM canal/osadmin:v3-amd64 +#FROM canal/osadmin:v3-arm64 MAINTAINER agapple (jianghang115@gmail.com) diff --git a/docker/base/Dockerfile_admin b/docker/base/Dockerfile_admin deleted file mode 100644 index bf730bd44e..0000000000 --- a/docker/base/Dockerfile_admin +++ /dev/null @@ -1,11 +0,0 @@ -FROM canal/osbase:v1 - -MAINTAINER agapple (jianghang115@gmail.com) - -RUN \ - groupadd -r mysql && useradd -r -g mysql mysql && \ - yum -y install wget mysql-server --nogpgcheck && \ - yum clean all && \ - true - -CMD ["/bin/bash"] \ No newline at end of file diff --git 
a/docker/base/Dockerfile_v2 b/docker/base/Dockerfile_v2 deleted file mode 100644 index 7de9ead1f5..0000000000 --- a/docker/base/Dockerfile_v2 +++ /dev/null @@ -1,11 +0,0 @@ -FROM canal/osbase:v1 - -MAINTAINER agapple (jianghang115@gmail.com) - -env NODE_EPORTER_LINK="https://github.com/prometheus/node_exporter/releases/download/v0.18.1/node_exporter-0.18.1.linux-arm64.tar.gz" - -RUN \ - wget "$NODE_EPORTER_LINK" -O /tmp/node_exporter.tar.gz && \ - true - -CMD ["/bin/bash"] \ No newline at end of file diff --git a/docker/base/README.md b/docker/base/README.md new file mode 100644 index 0000000000..79853abf05 --- /dev/null +++ b/docker/base/README.md @@ -0,0 +1,6 @@ + +# osbase +cd amd64 && docker build --no-cache -t canal/osbase ./ -f Dockerfile + +# osadmin +cd amd64 && docker build --no-cache -t canal/osadmin ./ -f ./Dockerfile_admin diff --git a/docker/base/amd64/Dockerfile b/docker/base/amd64/Dockerfile new file mode 100644 index 0000000000..845fa7328d --- /dev/null +++ b/docker/base/amd64/Dockerfile @@ -0,0 +1,60 @@ +# amd64 +FROM centos:centos7.9.2009@sha256:dead07b4d8ed7e29e98de0f4504d87e8880d4347859d839686a31da35a3b532f + +ARG jdk_rpm +ARG platform_env + +MAINTAINER agapple (jianghang115@gmail.com) +# env DOWNLOAD_AMD_LINK="https://download.oracle.com/otn/java/jdk/8u361-b09/0ae14417abb444ebb02b9815e2103550/jdk-8u361-linux-x64.rpm" +# env DOWNLOAD_ARM_LINK="https://download.oracle.com/otn/java/jdk/8u361-b09/0ae14417abb444ebb02b9815e2103550/jdk-8u361-linux-aarch64.rpm" +env NODE_EPORTER_LINK="https://github.com/prometheus/node_exporter/releases/download/v1.6.1/node_exporter-1.6.1.linux-amd64.tar.gz" + +# install system +# update yum config, fix "centos6.x yum install failure && Determining fastest mirrors slow" problems +COPY yum/ /tmp/ +RUN \ + /bin/cp /etc/yum.repos.d/CentOS-Base.repo /etc/yum.repos.d/CentOS-Base.repo.bak && \ + /bin/cp -f /tmp/CentOS-Base.repo /etc/yum.repos.d/CentOS-Base.repo && \ + /bin/cp /etc/yum/pluginconf.d/fastestmirror.conf 
/etc/yum/pluginconf.d/fastestmirror.conf.bak && \ + awk '{ if($0=="enabled=1"){print "enabled=0";} else{print $0;} }' /etc/yum/pluginconf.d/fastestmirror.conf.bak > /etc/yum/pluginconf.d/fastestmirror.conf && \ + /bin/cp /etc/yum.conf /etc/yum.conf.bak && \ + echo 'minrate=1' >> /etc/yum.conf && echo 'timeout=600' >> /etc/yum.conf && \ + yum clean all && yum makecache + +RUN \ + /bin/cp -f /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && \ + echo 'root:Hello1234' | chpasswd && \ + groupadd -r admin && useradd -g admin admin && \ + yum install -y man && \ + yum install -y dstat && \ + yum install -y unzip && \ + yum install -y nc && \ + yum install -y openssh-server && \ + yum install -y tar && \ + yum install -y which && \ + yum install -y wget && \ + yum install -y perl && \ + yum install -y file && \ + ssh-keygen -q -N "" -t dsa -f /etc/ssh/ssh_host_dsa_key && \ + ssh-keygen -q -N "" -t rsa -f /etc/ssh/ssh_host_rsa_key && \ + sed -ri 's/session required pam_loginuid.so/#session required pam_loginuid.so/g' /etc/pam.d/sshd && \ + sed -i -e 's/^#Port 22$/Port 2222/' /etc/ssh/sshd_config && \ + mkdir -p /root/.ssh && chown root.root /root && chmod 700 /root/.ssh && \ + yum install -y cronie && \ + sed -i '/session required pam_loginuid.so/d' /etc/pam.d/crond && \ + true + +COPY ${jdk_rpm} /tmp/ +RUN \ + touch /var/lib/rpm/* && \ + #if [ "$BUILDPLATFORM" == "linux/amd64" ] ; then wget --no-cookies --no-check-certificate --header "Cookie: gpw_e24=xxx; oraclelicense=accept-securebackup-cookie" "$DOWNLOAD_AMD_LINK" -O /tmp/jdk-8-linux.rpm ; fi && \ + #if [ "$BUILDPLATFORM" == "linux/arm64" ] ; then wget --no-cookies --no-check-certificate --header "Cookie: gpw_e24=xxx; oraclelicense=accept-securebackup-cookie" "$DOWNLOAD_ARM_LINK" -O /tmp/jdk-8-linux.rpm ; fi && \ + yum -y install /tmp/jdk-*.rpm && \ + /bin/rm -f /tmp/jdk-*.rpm && \ + echo "export JAVA_HOME=/usr/java/latest" >> /etc/profile && \ + echo "export PATH=\$JAVA_HOME/bin:\$PATH" >> /etc/profile && \ + wget 
"$NODE_EPORTER_LINK" -O /tmp/node_exporter.tar.gz && \ + yum clean all && \ + true + +CMD ["/bin/bash"] diff --git a/docker/base/amd64/Dockerfile_admin b/docker/base/amd64/Dockerfile_admin new file mode 100644 index 0000000000..e1e3a68029 --- /dev/null +++ b/docker/base/amd64/Dockerfile_admin @@ -0,0 +1,15 @@ +# amd64 +FROM canal/osbase:v3-amd64 + +MAINTAINER agapple (jianghang115@gmail.com) + +RUN \ + groupadd -r mysql && useradd -r -g mysql mysql && \ + wget https://dev.mysql.com/get/mysql80-community-release-el7-3.noarch.rpm && \ + rpm -ivh mysql80-community-release-el7-3.noarch.rpm && \ + yum install sudo mysql-community-server --nogpgcheck -y && \ + rm -f mysql80-community-release-el7-3.noarch.rpm && \ + yum clean all && \ + true + +CMD ["/bin/bash"] diff --git a/docker/base/amd64/yum/CentOS-Base.repo b/docker/base/amd64/yum/CentOS-Base.repo new file mode 100644 index 0000000000..1740201d83 --- /dev/null +++ b/docker/base/amd64/yum/CentOS-Base.repo @@ -0,0 +1,43 @@ +# CentOS-Base.repo +# +# The mirror system uses the connecting IP address of the client and the +# update status of each mirror to pick mirrors that are updated to and +# geographically close to the client. You should use this for CentOS updates +# unless you are manually picking other mirrors. +# +# If the mirrorlist= does not work for you, as a fall back you can try the +# remarked out baseurl= line instead. 
+# +# + +[base] +name=CentOS-7 - Base +mirrorlist=http://mirrorlist.centos.org/?release=7&arch=$basearch&repo=os&infra=$infra +#baseurl=http://mirror.centos.org/centos/7/os/$basearch/ +gpgcheck=1 +gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 + +#released updates +[updates] +name=CentOS-7 - Updates +mirrorlist=http://mirrorlist.centos.org/?release=7&arch=$basearch&repo=updates&infra=$infra +#baseurl=http://mirror.centos.org/centos/7/updates/$basearch/ +gpgcheck=1 +gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 + +#additional packages that may be useful +[extras] +name=CentOS-7 - Extras +mirrorlist=http://mirrorlist.centos.org/?release=7&arch=$basearch&repo=extras&infra=$infra +#baseurl=http://mirror.centos.org/centos/7/extras/$basearch/ +gpgcheck=1 +gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 + +#additional packages that extend functionality of existing packages +[centosplus] +name=CentOS-7 - Plus +mirrorlist=http://mirrorlist.centos.org/?release=7&arch=$basearch&repo=centosplus&infra=$infra +#baseurl=http://mirror.centos.org/centos/7/centosplus/$basearch/ +gpgcheck=1 +enabled=0 +gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 diff --git a/docker/base/Dockerfile b/docker/base/arm64/Dockerfile similarity index 72% rename from docker/base/Dockerfile rename to docker/base/arm64/Dockerfile index 4000bc7c8f..0eb5e6ea83 100644 --- a/docker/base/Dockerfile +++ b/docker/base/arm64/Dockerfile @@ -1,10 +1,15 @@ -FROM centos:centos6.10 +# arm64 +FROM centos:centos7.9.2009@sha256:73f11afcbb50d8bc70eab9f0850b3fa30e61a419bc48cf426e63527d14a8373b + +ARG jdk_rpm +ARG platform_env MAINTAINER agapple (jianghang115@gmail.com) +# env DOWNLOAD_AMD_LINK="https://download.oracle.com/otn/java/jdk/8u361-b09/0ae14417abb444ebb02b9815e2103550/jdk-8u361-linux-x64.rpm" +# env DOWNLOAD_ARM_LINK="https://download.oracle.com/otn/java/jdk/8u361-b09/0ae14417abb444ebb02b9815e2103550/jdk-8u361-linux-aarch64.rpm" +env 
NODE_EPORTER_LINK="https://github.com/prometheus/node_exporter/releases/download/v1.6.1/node_exporter-1.6.1.linux-arm64.tar.gz" -env DOWNLOAD_LINK="https://download.oracle.com/otn-pub/java/jdk/8u281-b09/89d678f2be164786b292527658ca1605/jdk-8u281-linux-x64.rpm" # install system - # update yum config, fix "centos6.x yum install failure && Determining fastest mirrors slow" problems COPY yum/ /tmp/ RUN \ @@ -39,13 +44,14 @@ RUN \ sed -i '/session required pam_loginuid.so/d' /etc/pam.d/crond && \ true +COPY ${jdk_rpm} /tmp/ RUN \ - touch /var/lib/rpm/* && \ - wget --no-cookies --no-check-certificate --header "Cookie: gpw_e24=xxx; oraclelicense=accept-securebackup-cookie" "$DOWNLOAD_LINK" -O /tmp/jdk-8-linux-x64.rpm && \ - yum -y install /tmp/jdk-8-linux-x64.rpm && \ - /bin/rm -f /tmp/jdk-8-linux-x64.rpm && \ + touch /var/lib/rpm/* && \ + yum -y install /tmp/jdk-*.rpm && \ + /bin/rm -f /tmp/jdk-*.rpm && \ echo "export JAVA_HOME=/usr/java/latest" >> /etc/profile && \ echo "export PATH=\$JAVA_HOME/bin:\$PATH" >> /etc/profile && \ + wget "$NODE_EPORTER_LINK" -O /tmp/node_exporter.tar.gz && \ yum clean all && \ true diff --git a/docker/base/arm64/Dockerfile_admin b/docker/base/arm64/Dockerfile_admin new file mode 100644 index 0000000000..5f9cdf3729 --- /dev/null +++ b/docker/base/arm64/Dockerfile_admin @@ -0,0 +1,15 @@ +# arm64 +FROM canal/osbase:v3-arm64 + +MAINTAINER agapple (jianghang115@gmail.com) + +RUN \ + groupadd -r mysql && useradd -r -g mysql mysql && \ + wget https://dev.mysql.com/get/mysql80-community-release-el7-3.noarch.rpm && \ + rpm -ivh mysql80-community-release-el7-3.noarch.rpm && \ + yum install sudo mysql-community-server-8.0.32 --nogpgcheck -y && \ + rm -f mysql80-community-release-el7-3.noarch.rpm && \ + yum clean all && \ + true + +CMD ["/bin/bash"] diff --git a/docker/base/arm64/yum/CentOS-Base.repo b/docker/base/arm64/yum/CentOS-Base.repo new file mode 100644 index 0000000000..ee1b93b919 --- /dev/null +++ b/docker/base/arm64/yum/CentOS-Base.repo @@ 
-0,0 +1,48 @@ +# CentOS-Base.repo +# +# The mirror system uses the connecting IP address of the client and the +# update status of each mirror to pick mirrors that are updated to and +# geographically close to the client. You should use this for CentOS updates +# unless you are manually picking other mirrors. +# +# If the mirrorlist= does not work for you, as a fall back you can try the +# remarked out baseurl= line instead. +# +# + +[base] +name=CentOS-$releasever - Base +mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=os&infra=$infra +#baseurl=http://mirror.centos.org/altarch/$releasever/os/$basearch/ +gpgcheck=1 +gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 + file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7-aarch64 + +#released updates +[updates] +name=CentOS-$releasever - Updates +mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=updates&infra=$infra +#baseurl=http://mirror.centos.org/altarch/$releasever/updates/$basearch/ +gpgcheck=1 +gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 + file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7-aarch64 + +#additional packages that may be useful +[extras] +name=CentOS-$releasever - Extras +mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=extras&infra=$infra +#baseurl=http://mirror.centos.org/altarch/$releasever/extras/$basearch/ +gpgcheck=1 +gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 + file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7-aarch64 +enabled=1 + +#additional packages that extend functionality of existing packages +[centosplus] +name=CentOS-$releasever - Plus +mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=centosplus&infra=$infra +#baseurl=http://mirror.centos.org/altarch/$releasever/centosplus/$basearch/ +gpgcheck=1 +enabled=0 +gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 + file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7-aarch64 diff --git a/docker/base/yum/CentOS-Base.repo 
b/docker/base/yum/CentOS-Base.repo deleted file mode 100644 index 63ff498cf4..0000000000 --- a/docker/base/yum/CentOS-Base.repo +++ /dev/null @@ -1,66 +0,0 @@ -# CentOS-Base.repo -# -# The mirror system uses the connecting IP address of the client and the -# update status of each mirror to pick mirrors that are updated to and -# geographically close to the client. You should use this for CentOS updates -# unless you are manually picking other mirrors. -# -# If the mirrorlist= does not work for you, as a fall back you can try the -# remarked out baseurl= line instead. -# - -[base] -name=CentOS-$releasever - Base -failovermethod=priority -#mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=os&infra=$infra -baseurl=https://mirrors.aliyun.com/centos-vault/centos/$releasever/os/$basearch/ - https://mirrors.cloud.tencent.com/centos/$releasever/os/$basearch/ - https://vault.centos.org/centos/$releasever/os/$basearch/ -gpgcheck=1 -gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-6 - -#released updates -[updates] -name=CentOS-$releasever - Updates -failovermethod=priority -#mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=updates&infra=$infra -baseurl=https://mirrors.aliyun.com/centos-vault/centos/$releasever/updates/$basearch/ - https://mirrors.cloud.tencent.com/centos/$releasever/updates/$basearch/ - https://vault.centos.org/centos/$releasever/updates/$basearch/ -gpgcheck=1 -gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-6 - -#additional packages that may be useful -[extras] -name=CentOS-$releasever - Extras -failovermethod=priority -#mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=extras&infra=$infra -baseurl=https://mirrors.aliyun.com/centos-vault/centos/$releasever/extras/$basearch/ - https://mirrors.cloud.tencent.com/centos/$releasever/extras/$basearch/ - https://vault.centos.org/centos/$releasever/extras/$basearch/ -gpgcheck=1 
-gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-6 - -#additional packages that extend functionality of existing packages -[centosplus] -name=CentOS-$releasever - Plus -failovermethod=priority -#mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=centosplus&infra=$infra -baseurl=https://mirrors.aliyun.com/centos-vault/centos/$releasever/centosplus/$basearch/ - https://mirrors.cloud.tencent.com/centos/6.0/centosplus/$basearch/ - https://vault.centos.org/centos/$releasever/centosplus/$basearch/ -gpgcheck=1 -enabled=0 -gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-6 - -#contrib - packages by Centos Users -[contrib] -name=CentOS-$releasever - Contrib -failovermethod=priority -#mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=contrib&infra=$infra -baseurl=https://mirrors.aliyun.com/centos-vault/centos/$releasever/contrib/$basearch/ - https://mirrors.cloud.tencent.com/centos/$releasever/contrib/$basearch/ - https://vault.centos.org/centos/$releasever/contrib/$basearch/ -gpgcheck=1 -enabled=0 -gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-6 \ No newline at end of file diff --git a/docker/build.sh b/docker/build.sh index f49c314703..7df4c52daf 100644 --- a/docker/build.sh +++ b/docker/build.sh @@ -14,20 +14,16 @@ case "`uname`" in esac BASE=${bin_abs_path} -if [ "$1" == "base" ] ; then - docker build --no-cache -t canal/osbase $BASE/base -elif [ "$1" == "base_v2" ] ; then - docker build --no-cache -t canal/osbase $BASE/base -f $BASE/base/Dockerfile_v2 -elif [ "$1" == "base_admin" ] ; then - docker build --no-cache -t canal/osadmin $BASE/base -f $BASE/base/Dockerfile_admin -elif [ "$1" == "admin" ] ; then +if [ "$1" == "admin" ] ; then rm -rf $BASE/canal.*.tar.gz ; cd $BASE/../ && mvn clean package -Dmaven.test.skip -Denv=release && cd $current_path ; cp $BASE/../target/canal.admin-*.tar.gz $BASE/ docker build --no-cache -t canal/canal-admin $BASE/ -f $BASE/Dockerfile_admin + # docker build --platform 
linux/arm64 --no-cache -t canal/canal-admin $BASE/ -f $BASE/Dockerfile_admin else rm -rf $BASE/canal.*.tar.gz ; cd $BASE/../ && mvn clean package -Dmaven.test.skip -Denv=release && cd $current_path ; cp $BASE/../target/canal.deployer-*.tar.gz $BASE/ docker build --no-cache -t canal/canal-server $BASE/ + # docker build -platform linux/arm64 --no-cache -t canal/canal-server $BASE/ fi diff --git a/docker/image/alidata/bin/main.sh b/docker/image/alidata/bin/main.sh index a57551a73b..256e7bcd5f 100755 --- a/docker/image/alidata/bin/main.sh +++ b/docker/image/alidata/bin/main.sh @@ -12,8 +12,8 @@ for e in $(ls /alidata/init/*) ; do done echo "==> INIT DEFAULT" -service sshd start -service crond start +systemctl start sshd +systemctl start crond #echo "check hostname -i: `hostname -i`" #hti_num=`hostname -i|awk '{print NF}'` @@ -24,4 +24,4 @@ service crond start echo "==> INIT DONE" echo "==> RUN ${*}" -exec "${@}" \ No newline at end of file +exec "${@}" diff --git a/docker/image/app.sh b/docker/image/app.sh index 2ef1900c2c..821cd58ea4 100755 --- a/docker/image/app.sh +++ b/docker/image/app.sh @@ -1,5 +1,5 @@ #!/bin/bash -set -e +set +e source /etc/profile export JAVA_HOME=/usr/java/latest @@ -87,7 +87,7 @@ function start_canal() { su admin -c 'cd /home/admin/canal-server/bin/ && sh restart.sh local 1>>/tmp/start.log 2>&1' sleep 5 #check start - checkStart "canal" "nc 127.0.0.1 $adminPort -w 1 -z | wc -l" 30 + checkStart "canal" "nc 127.0.0.1 $adminPort -w 1 -zv 2> /tmp/nc.out && cat /tmp/nc.out | grep -c Connected" 30 else metricsPort=`perl -le 'print $ENV{"canal.metrics.pull.port"}'` if [ -z "$metricsPort" ] ; then @@ -95,9 +95,20 @@ function start_canal() { fi destination=`perl -le 'print $ENV{"canal.destinations"}'` - if [[ "$destination" =~ ',' ]]; then - echo "multi destination:$destination is not support" - exit 1; + destinationExpr=`perl -le 'print $ENV{"canal.destinations.expr"}'` + multistream=`perl -le 'print $ENV{"canal.instance.multi.stream.on"}'` + + if [[ 
"$destination" =~ ',' ]] || [[ -n "$destinationExpr" ]]; then + if [[ "$multistream" = 'true' ]] ; then + if [[ -n "$destinationExpr" ]] ; then + splitDestinations '1' $destinationExpr + else + splitDestinations '2' $destination + fi + else + echo "multi destination is not support, destinationExpr:$destinationExpr, destinations:$destination" + exit 1; + fi else if [ "$destination" != "" ] && [ "$destination" != "example" ] ; then if [ -d /home/admin/canal-server/conf/example ]; then @@ -109,10 +120,38 @@ function start_canal() { su admin -c 'cd /home/admin/canal-server/bin/ && sh restart.sh 1>>/tmp/start.log 2>&1' sleep 5 #check start - checkStart "canal" "nc 127.0.0.1 $metricsPort -w 1 -z | wc -l" 30 + checkStart "canal" "nc 127.0.0.1 $metricsPort -w 1 -zv 2> /tmp/nc.out && cat /tmp/nc.out | grep -c Connected" 30 fi } +function splitDestinations() { + holdExample="false" + prefix='' + array=() + + if [[ "$1" == '1' ]] ; then + echo "split destinations expr "$2 + prefix=$(echo $2 | sed 's/{.*//') + num=$(echo $2 | sed 's/.*{//;s/}//;s/-/ /') + array=($(seq $num)) + else + echo "split destinations "$2 + array=(${2//,/ }) + fi + + for var in ${array[@]} + do + cp -r /home/admin/canal-server/conf/example /home/admin/canal-server/conf/$prefix$var + chown admin:admin -R /home/admin/canal-server/conf/$prefix$var + if [[ "$prefix$var" = 'example' ]] ; then + holdExample="true" + fi + done + if [[ "$holdExample" != 'true' ]] ; then + rm -rf /home/admin/canal-server/conf/example + fi +} + function stop_canal() { echo "stop canal" su admin -c 'cd /home/admin/canal-server/bin/ && sh stop.sh 1>>/tmp/start.log 2>&1' diff --git a/docker/image/app_admin.sh b/docker/image/app_admin.sh index 156b9f0d54..dd02632c73 100755 --- a/docker/image/app_admin.sh +++ b/docker/image/app_admin.sh @@ -1,5 +1,5 @@ #!/bin/bash -set -e +set +e source /etc/profile export JAVA_HOME=/usr/java/latest @@ -94,37 +94,45 @@ function start_mysql() { MYSQL_ROOT_PASSWORD=Hello1234 # connect local mysql if [ 
-z "$(ls -A /var/lib/mysql)" ]; then - mysql_install_db --user=mysql --datadir=/var/lib/mysql 1>>/tmp/start.log 2>&1 - # These statements _must_ be on individual lines, and _must_ end with - # semicolons (no line breaks or comments are permitted). - # TODO proper SQL escaping on ALL the things D: TEMP_FILE='/tmp/init.sql' - echo "update mysql.user set password=password('${MYSQL_ROOT_PASSWORD}') where user='root';" >> $TEMP_FILE - echo "grant all privileges on *.* to 'root'@'%' WITH GRANT OPTION ;" >> $TEMP_FILE + echo "ALTER USER 'root'@'localhost' IDENTIFIED BY '${MYSQL_ROOT_PASSWORD}';" >> $TEMP_FILE + /usr/sbin/mysqld --initialize --user=mysql --datadir=/var/lib/mysql --init-file=/tmp/init.sql --default-authentication-plugin=mysql_native_password 1>>/tmp/start.log 2>&1 + echo "default-authentication-plugin=mysql_native_password" >> /etc/my.cnf + # systemctl start mysqld + sudo -u mysql /usr/sbin/mysqld & + sleep 5 + checkStart "mysql" "echo 'show status' | mysql -b -s -h127.0.0.1 -P3306 -uroot -p${MYSQL_ROOT_PASSWORD} | grep -c Uptime" 30 + # init file + rm -f $TEMP_FILE echo "create database if not exists $MYSQL_DATABASE ;" >> $TEMP_FILE echo "create user $MYSQL_USER identified by '$MYSQL_USER_PASSWORD' ;" >> $TEMP_FILE - echo "grant all privileges on $MYSQL_DATABASE.* to '$MYSQL_USER'@'%' identified by '$MYSQL_USER_PASSWORD' ;" >> $TEMP_FILE - echo "grant all privileges on $MYSQL_DATABASE.* to '$MYSQL_USER'@'localhost' identified by '$MYSQL_USER_PASSWORD' ;" >> $TEMP_FILE + echo "grant all privileges on $MYSQL_DATABASE.* to '$MYSQL_USER'@'%' ;" >> $TEMP_FILE echo "flush privileges;" >> $TEMP_FILE - service mysqld start - checkStart "mysql" "echo 'show status' | mysql -s -h127.0.0.1 -P3306 -uroot | grep -c Uptime" 30 - mysql -h127.0.0.1 -uroot -e "source $TEMP_FILE" 1>>/tmp/start.log 2>&1 - + # init user + cmd="mysql -h127.0.0.1 -uroot -p${MYSQL_ROOT_PASSWORD} -e 'source $TEMP_FILE' 1>>/tmp/start.log 2>&1" + eval $cmd + /bin/rm -f /tmp/init.sql + # init table 
cmd="mysql -h127.0.0.1 -u$MYSQL_USER -p$MYSQL_USER_PASSWORD $MYSQL_DATABASE -e 'source /home/admin/canal-admin/conf/canal_manager.sql' 1>>/tmp/start.log 2>&1" eval $cmd /bin/rm -f /home/admin/canal-admin/conf/canal_manager.sql else + echo "recover mysql ..." chown -R mysql:mysql /var/lib/mysql - service mysqld start + # systemctl start mysqld + rm -f /var/lib/mysql/mysql.sock.lock + sudo -u mysql /usr/sbin/mysqld & + sleep 5 #check start - checkStart "mysql" "echo 'show status' | mysql -b -s -h127.0.0.1 -P3306 -uroot -p$MYSQL_ROOT_PASSWORD | grep -c Uptime" 30 + checkStart "mysql" "echo 'show status' | mysql -b -s -h127.0.0.1 -P3306 -uroot -p${MYSQL_ROOT_PASSWORD} | grep -c Uptime" 30 fi } function stop_mysql() { echo "stop mysql ..." # stop mysql - service mysqld stop + # systemctl stop mysqld + ps auxf | grep mysqld | grep -v grep | awk '{print $2}' | xargs kill echo "stop mysql successful ..." } @@ -137,7 +145,7 @@ function start_admin() { su admin -c 'cd /home/admin/canal-admin/bin/ && sh restart.sh 1>>/tmp/start.log 2>&1' sleep 5 #check start - checkStart "canal" "nc 127.0.0.1 $serverPort -w 1 -z | wc -l" 30 + checkStart "canal" "nc 127.0.0.1 $serverPort -w 1 -zv 2>/tmp/nc.out && cat /tmp/nc.out | grep -c Connected" 30 } function stop_admin() { diff --git a/docker/run.sh b/docker/run.sh index 1675191107..da1cad866c 100644 --- a/docker/run.sh +++ b/docker/run.sh @@ -97,6 +97,6 @@ fi MEMORY="-m 4096m" LOCALHOST=`getMyIp` -cmd="docker run -d -it -h $LOCALHOST $CONFIG --name=canal-server $VOLUMNS $NET_MODE $PORTS $MEMORY canal/canal-server" +cmd="docker run -d --privileged=true -it -h $LOCALHOST $CONFIG --name=canal-server $VOLUMNS $NET_MODE $PORTS $MEMORY canal/canal-server" echo $cmd -eval $cmd \ No newline at end of file +eval $cmd diff --git a/docker/run_admin.sh b/docker/run_admin.sh index 75034eea60..b5aed3c2e8 100644 --- a/docker/run_admin.sh +++ b/docker/run_admin.sh @@ -86,6 +86,6 @@ fi MEMORY="-m 1024m" LOCALHOST=`getMyIp` -cmd="docker run -d -it -h 
$LOCALHOST $CONFIG --name=canal-admin $VOLUMNS $NET_MODE $PORTS $MEMORY canal/canal-admin" +cmd="docker run -d --privileged=true -it -h $LOCALHOST $CONFIG --name=canal-admin $VOLUMNS $NET_MODE $PORTS $MEMORY canal/canal-admin" echo $cmd eval $cmd diff --git a/driver/pom.xml b/driver/pom.xml index 89d2648306..6584e4eaee 100644 --- a/driver/pom.xml +++ b/driver/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter @@ -33,6 +33,10 @@ org.slf4j slf4j-api + + org.jboss.netty + netty + junit diff --git a/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/MysqlConnector.java b/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/MysqlConnector.java index 4cdd0af8c6..4850e249cf 100644 --- a/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/MysqlConnector.java +++ b/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/MysqlConnector.java @@ -13,11 +13,7 @@ import com.alibaba.otter.canal.parse.driver.mysql.packets.client.AuthSwitchResponsePacket; import com.alibaba.otter.canal.parse.driver.mysql.packets.client.ClientAuthenticationPacket; import com.alibaba.otter.canal.parse.driver.mysql.packets.client.QuitCommandPacket; -import com.alibaba.otter.canal.parse.driver.mysql.packets.server.AuthSwitchRequestMoreData; -import com.alibaba.otter.canal.parse.driver.mysql.packets.server.AuthSwitchRequestPacket; -import com.alibaba.otter.canal.parse.driver.mysql.packets.server.ErrorPacket; -import com.alibaba.otter.canal.parse.driver.mysql.packets.server.HandshakeInitializationPacket; -import com.alibaba.otter.canal.parse.driver.mysql.packets.server.Reply323Packet; +import com.alibaba.otter.canal.parse.driver.mysql.packets.server.*; import com.alibaba.otter.canal.parse.driver.mysql.socket.SocketChannel; import com.alibaba.otter.canal.parse.driver.mysql.socket.SocketChannelPool; import com.alibaba.otter.canal.parse.driver.mysql.utils.MSC; @@ -26,7 +22,7 @@ /** * 基于mysql socket协议的链接实现 
- * + * * @author jianghang 2013-2-18 下午09:22:30 * @version 1.0.1 */ @@ -220,53 +216,46 @@ private void negotiate(SocketChannel channel) throws IOException { packet.fromBytes(body); authData = packet.authData; pluginName = packet.authName; + logger.info("auth switch pluginName is {}.", pluginName); } - boolean isSha2Password = false; byte[] encryptedPassword = null; - if (pluginName != null && "mysql_native_password".equals(pluginName)) { + if ("mysql_clear_password".equals(pluginName)) { + encryptedPassword = getPassword().getBytes(); + header = authSwitchAfterAuth(encryptedPassword, header); + body = PacketManager.readBytes(channel, header.getPacketBodyLength(), timeout); + } else if ("mysql_native_password".equals(pluginName)) { try { encryptedPassword = MySQLPasswordEncrypter.scramble411(getPassword().getBytes(), authData); } catch (NoSuchAlgorithmException e) { throw new RuntimeException("can't encrypt password that will be sent to MySQL server.", e); } - } else if (pluginName != null && "caching_sha2_password".equals(pluginName)) { - isSha2Password = true; + header = authSwitchAfterAuth(encryptedPassword, header); + body = PacketManager.readBytes(channel, header.getPacketBodyLength(), timeout); + } else if ("caching_sha2_password".equals(pluginName)) { + byte[] scramble = authData; try { - encryptedPassword = MySQLPasswordEncrypter.scrambleCachingSha2(getPassword().getBytes(), authData); + encryptedPassword = MySQLPasswordEncrypter.scrambleCachingSha2(getPassword().getBytes(), scramble); } catch (DigestException e) { throw new RuntimeException("can't encrypt password that will be sent to MySQL server.", e); } - } - assert encryptedPassword != null; - AuthSwitchResponsePacket responsePacket = new AuthSwitchResponsePacket(); - responsePacket.authData = encryptedPassword; - byte[] auth = responsePacket.toBytes(); - - h = new HeaderPacket(); - h.setPacketBodyLength(auth.length); - h.setPacketSequenceNumber((byte) (header.getPacketSequenceNumber() + 1)); - 
PacketManager.writePkg(channel, h.toBytes(), auth); - logger.info("auth switch response packet is sent out."); - - header = null; - header = PacketManager.readHeader(channel, 4); - body = null; - body = PacketManager.readBytes(channel, header.getPacketBodyLength(), timeout); - assert body != null; - if (isSha2Password) { + header = authSwitchAfterAuth(encryptedPassword, header); + body = PacketManager.readBytes(channel, header.getPacketBodyLength(), timeout); + assert body != null; if (body[0] == 0x01 && body[1] == 0x04) { - // password auth failed - throw new IOException("caching_sha2_password Auth failed"); + // fixed issue https://github.com/alibaba/canal/pull/4767, support mysql 8.0.30+ + header = cachingSha2PasswordFullAuth(channel, header, getPassword().getBytes(), scramble); + body = PacketManager.readBytes(channel, header.getPacketBodyLength(), timeout); + } else { + header = PacketManager.readHeader(channel, 4); + body = PacketManager.readBytes(channel, header.getPacketBodyLength(), timeout); } - - header = null; - header = PacketManager.readHeader(channel, 4); - body = null; + } else { + header = authSwitchAfterAuth(encryptedPassword, header); body = PacketManager.readBytes(channel, header.getPacketBodyLength(), timeout); } } - + assert body != null; if (body[0] < 0) { if (body[0] == -1) { ErrorPacket err = new ErrorPacket(); @@ -278,6 +267,62 @@ private void negotiate(SocketChannel channel) throws IOException { } } + private HeaderPacket cachingSha2PasswordFullAuth(SocketChannel channel, HeaderPacket header, byte[] pass, + byte[] seed) throws IOException { + AuthSwitchResponsePacket responsePacket = new AuthSwitchResponsePacket(); + responsePacket.authData = new byte[] { 2 }; + byte[] auth = responsePacket.toBytes(); + HeaderPacket h = new HeaderPacket(); + h.setPacketBodyLength(auth.length); + h.setPacketSequenceNumber((byte) (header.getPacketSequenceNumber() + 1)); + PacketManager.writePkg(channel, h.toBytes(), auth); + logger.info("caching sha2 
password fullAuth request public key packet is sent out."); + + header = PacketManager.readHeader(channel, 4); + byte[] body = PacketManager.readBytes(channel, header.getPacketBodyLength(), timeout); + AuthSwitchRequestMoreData packet = new AuthSwitchRequestMoreData(); + packet.fromBytes(body); + if (packet.status != 0x01) { + throw new IOException("caching_sha2_password get public key failed"); + } + logger.info("caching sha2 password fullAuth get server public key succeed."); + byte[] publicKeyBytes = packet.authData; + + byte[] encryptedPassword = null; + try { + encryptedPassword = MySQLPasswordEncrypter.scrambleRsa(publicKeyBytes, pass, seed); + } catch (Exception e) { + logger.error("rsa encrypt failed {}", publicKeyBytes); + throw new IOException("caching_sha2_password auth failed", e); + } + + // send auth + responsePacket = new AuthSwitchResponsePacket(); + responsePacket.authData = encryptedPassword; + auth = responsePacket.toBytes(); + h = new HeaderPacket(); + h.setPacketBodyLength(auth.length); + h.setPacketSequenceNumber((byte) (header.getPacketSequenceNumber() + 1)); + PacketManager.writePkg(channel, h.toBytes(), auth); + logger.info("caching sha2 password fullAuth response auth data packet is sent out."); + return PacketManager.readHeader(channel, 4); + } + + private HeaderPacket authSwitchAfterAuth(byte[] encryptedPassword, HeaderPacket header) throws IOException { + assert encryptedPassword != null; + AuthSwitchResponsePacket responsePacket = new AuthSwitchResponsePacket(); + responsePacket.authData = encryptedPassword; + byte[] auth = responsePacket.toBytes(); + + HeaderPacket h = new HeaderPacket(); + h.setPacketBodyLength(auth.length); + h.setPacketSequenceNumber((byte) (header.getPacketSequenceNumber() + 1)); + PacketManager.writePkg(channel, h.toBytes(), auth); + logger.info("auth switch response packet is sent out."); + header = PacketManager.readHeader(channel, 4); + return header; + } + private void auth323(SocketChannel channel, byte 
packetSequenceNumber, byte[] seed) throws IOException { // auth 323 Reply323Packet r323 = new Reply323Packet(); diff --git a/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/MysqlQueryExecutor.java b/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/MysqlQueryExecutor.java index b412a96b85..15210eb37e 100644 --- a/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/MysqlQueryExecutor.java +++ b/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/MysqlQueryExecutor.java @@ -119,12 +119,14 @@ public List queryMulti(String queryString) throws IOException { fields.add(fp); } - moreResult = readEofPacket(); + readEofPacket(); List rowData = new ArrayList<>(); while (true) { body = readNextPacket(); if (body[0] == -2) { + EOFPacket packet = parseEOFPacket(body); + moreResult = (packet.statusFlag & 0x0008) != 0; break; } RowDataPacket rowDataPacket = new RowDataPacket(); @@ -146,13 +148,17 @@ public List queryMulti(String queryString) throws IOException { private boolean readEofPacket() throws IOException { byte[] eofBody = readNextPacket(); + EOFPacket packet = parseEOFPacket(eofBody); + return (packet.statusFlag & 0x0008) != 0; + } + + private EOFPacket parseEOFPacket(byte[] eofBody) throws IOException { EOFPacket packet = new EOFPacket(); packet.fromBytes(eofBody); if (eofBody[0] != -2) { throw new IOException("EOF Packet is expected, but packet with field_count=" + eofBody[0] + " is found."); } - - return (packet.statusFlag & 0x0008) != 0; + return packet; } protected byte[] readNextPacket() throws IOException { diff --git a/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/packets/MariaGTIDSet.java b/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/packets/MariaGTIDSet.java index c9b22c1ccf..4612b93916 100644 --- a/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/packets/MariaGTIDSet.java +++ 
b/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/packets/MariaGTIDSet.java @@ -1,11 +1,11 @@ package com.alibaba.otter.canal.parse.driver.mysql.packets; -import org.apache.commons.lang.StringUtils; - import java.io.IOException; import java.util.HashMap; import java.util.Map; +import org.apache.commons.lang.StringUtils; + /** * 类 MariaGTIDSet.java 的实现 * @@ -13,8 +13,9 @@ * @version 1.0.0 */ public class MariaGTIDSet implements GTIDSet { + //MariaDB 10.0.2+ representation of Gtid - Map gtidMap = new HashMap<>(); + private Map gtidMap = new HashMap<>(); @Override public byte[] encode() throws IOException { diff --git a/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/packets/UUIDSet.java b/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/packets/UUIDSet.java index 5926a4fadf..0eaea9cd73 100644 --- a/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/packets/UUIDSet.java +++ b/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/packets/UUIDSet.java @@ -181,9 +181,7 @@ public static List combine(List intervals) { int j; for (j = i + 1; j < len; j++) { if (intervals.get(i).stop >= intervals.get(j).start) { - if (intervals.get(i).stop < intervals.get(j).stop) { - intervals.get(i).stop = intervals.get(j).stop; - } + intervals.get(i).stop = intervals.get(j).stop; } else { break; } diff --git a/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/utils/GtidUtil.java b/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/utils/GtidUtil.java index 3b4cc3d592..6315058a01 100644 --- a/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/utils/GtidUtil.java +++ b/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/utils/GtidUtil.java @@ -15,7 +15,8 @@ public class GtidUtil { public static GTIDSet parseGtidSet(String gtid, boolean isMariaDB) { if (isMariaDB) { return MariaGTIDSet.parse(gtid); + } else { + return MysqlGTIDSet.parse(gtid); } - return 
MysqlGTIDSet.parse(gtid); } } diff --git a/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/utils/MySQLPasswordEncrypter.java b/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/utils/MySQLPasswordEncrypter.java index 9679fdc7d2..b6f8a60af4 100644 --- a/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/utils/MySQLPasswordEncrypter.java +++ b/driver/src/main/java/com/alibaba/otter/canal/parse/driver/mysql/utils/MySQLPasswordEncrypter.java @@ -1,8 +1,13 @@ package com.alibaba.otter.canal.parse.driver.mysql.utils; -import java.security.DigestException; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; +import java.security.*; +import java.security.spec.InvalidKeySpecException; +import java.security.spec.X509EncodedKeySpec; + +import javax.crypto.BadPaddingException; +import javax.crypto.Cipher; +import javax.crypto.IllegalBlockSizeException; +import javax.crypto.NoSuchPaddingException; public class MySQLPasswordEncrypter { @@ -85,6 +90,26 @@ public static String scramble323(String pass, String seed) { return new String(chars); } + public static final byte[] scrambleRsa(byte[] publicKeyBytes, byte[] pass, + byte[] seed) throws NoSuchAlgorithmException, InvalidKeySpecException, + NoSuchPaddingException, InvalidKeyException, + IllegalBlockSizeException, BadPaddingException { + byte[] input = new byte[pass.length + 1]; + System.arraycopy(pass, 0, input, 0, pass.length); + byte[] encryptedPassword = new byte[input.length]; + xorString(input, encryptedPassword, seed, input.length); + String publicKeyPem = new String(publicKeyBytes).replace("\n", "") + .replace("-----BEGIN PUBLIC KEY-----", "") + .replace("-----END PUBLIC KEY-----", ""); + byte[] certificateData = java.util.Base64.getDecoder().decode(publicKeyPem.getBytes()); + X509EncodedKeySpec keySpec = new X509EncodedKeySpec(certificateData); + KeyFactory keyFactory = KeyFactory.getInstance("RSA"); + PublicKey publicKey = 
keyFactory.generatePublic(keySpec); + Cipher cipher = Cipher.getInstance("RSA/ECB/OAEPWithSHA-1AndMGF1Padding"); + cipher.init(Cipher.ENCRYPT_MODE, publicKey); + return cipher.doFinal(encryptedPassword); + } + private static long[] hash(String src) { long nr = 1345345333L; long add = 7; diff --git a/driver/src/test/java/com/alibaba/otter/canal/parse/driver/mysql/MysqlGTIDSetTest.java b/driver/src/test/java/com/alibaba/otter/canal/parse/driver/mysql/MysqlGTIDSetTest.java index bf29f7d940..1c8ba0e961 100644 --- a/driver/src/test/java/com/alibaba/otter/canal/parse/driver/mysql/MysqlGTIDSetTest.java +++ b/driver/src/test/java/com/alibaba/otter/canal/parse/driver/mysql/MysqlGTIDSetTest.java @@ -42,10 +42,9 @@ public void testUpdate() { MysqlGTIDSet mysqlGTIDSet1 = MysqlGTIDSet.parse(gtid1); String gtid2 = "726757ad-4455-11e8-ae04-0242ac110002:1-20304074"; - MysqlGTIDSet mysqlGTIDSet2 = MysqlGTIDSet.parse(gtid2); mysqlGTIDSet1.update(gtid2); - assertEquals("726757ad-4455-11e8-ae04-0242ac110002:1-25536412", mysqlGTIDSet1.toString()); + assertEquals("726757ad-4455-11e8-ae04-0242ac110002:1-20304074", mysqlGTIDSet1.toString()); } @Test diff --git a/example/pom.xml b/example/pom.xml index a5c8f099ed..ef3d610de2 100644 --- a/example/pom.xml +++ b/example/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter @@ -21,82 +21,43 @@ canal.protocol ${project.version} + - com.alibaba - druid + org.apache.rocketmq + rocketmq-client + ${rocketmq_version} - mysql - mysql-connector-java + org.apache.rocketmq + rocketmq-acl + ${rocketmq_version} - org.apache.ddlutils - ddlutils - 1.0 - - - commons-beanutils - commons-beanutils-core - - - commons-lang - commons-lang - - - commons-dbcp - commons-dbcp - - - commons-pool - commons-pool - - - commons-logging - commons-logging-api - - - dom4j - dom4j - - - stax - stax-api - - - commons-collections - commons-collections - - - commons-digester - commons-digester - - - commons-betwixt - 
commons-betwixt - - + com.rabbitmq + amqp-client + ${rabbitmq_version} - org.apache.commons - commons-pool2 - 2.5.0 + com.alibaba.mq-amqp + mq-amqp-client + ${mq_amqp_client} - commons-beanutils - commons-beanutils - 1.8.2 + org.apache.kafka + kafka-clients + ${kafka_version} + - org.apache.commons - commons-lang3 - 3.7 + org.apache.pulsar + pulsar-client + ${pulsar_version} - commons-collections - commons-collections - 3.2 + org.apache.pulsar + pulsar-client-admin + ${pulsar_version} - junit diff --git a/example/src/main/bin/startup.sh b/example/src/main/bin/startup.sh index 73b1d4827f..0a716e3a10 100644 --- a/example/src/main/bin/startup.sh +++ b/example/src/main/bin/startup.sh @@ -58,11 +58,27 @@ in exit;; esac -str=`file $JAVA_HOME/bin/java | grep 64-bit` +JavaVersion=`$JAVA -version 2>&1 |awk 'NR==1{ gsub(/"/,""); print $3 }' | awk -F '.' '{print $1}'` +str=`file -L $JAVA | grep 64-bit` + +JAVA_OPTS="$JAVA_OPTS -Xss1m -XX:+AggressiveOpts -XX:-UseBiasedLocking -XX:-OmitStackTraceInFastThrow -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=$base/logs" +if [ $JavaVersion -ge 11 ] ; then + #JAVA_OPTS="$JAVA_OPTS -Xlog:gc*:$base_log/gc.log:time " + JAVA_OPTS="$JAVA_OPTS" +else + #JAVA_OPTS="$JAVA_OPTS -Xloggc:$base/logs/canal/gc.log -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCApplicationStoppedTime" + JAVA_OPTS="$JAVA_OPTS -XX:+UseFastAccessorMethods -XX:+PrintAdaptiveSizePolicy -XX:+PrintTenuringDistribution" +fi + if [ -n "$str" ]; then - JAVA_OPTS="-server -Xms2048m -Xmx3072m -Xmn1024m -XX:SurvivorRatio=2 -XX:PermSize=96m -XX:MaxPermSize=256m -Xss256k -XX:-UseAdaptiveSizePolicy -XX:MaxTenuringThreshold=15 -XX:+DisableExplicitGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+UseCMSCompactAtFullCollection -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:+HeapDumpOnOutOfMemoryError" + if [ $JavaVersion -ge 11 ] ; then + # For G1 + JAVA_OPTS="-server -Xms2g -Xmx3g -XX:+UseG1GC -XX:MaxGCPauseMillis=250 -XX:+UseGCOverheadLimit 
-XX:+ExplicitGCInvokesConcurrent $JAVA_OPTS" + else + JAVA_OPTS="-server -Xms2g -Xmx3g -Xmn1g -XX:SurvivorRatio=2 -XX:PermSize=96m -XX:MaxPermSize=256m -XX:MaxTenuringThreshold=15 -XX:+DisableExplicitGC $JAVA_OPTS" + fi else - JAVA_OPTS="-server -Xms1024m -Xmx1024m -XX:NewSize=256m -XX:MaxNewSize=256m -XX:MaxPermSize=128m " + JAVA_OPTS="-server -Xms1024m -Xmx1024m -XX:NewSize=256m -XX:MaxNewSize=256m -XX:MaxPermSize=128m $JAVA_OPTS" fi JAVA_OPTS=" $JAVA_OPTS -Djava.awt.headless=true -Djava.net.preferIPv4Stack=true -Dfile.encoding=UTF-8" diff --git a/example/src/main/java/com/alibaba/otter/canal/example/BaseCanalClientTest.java b/example/src/main/java/com/alibaba/otter/canal/example/BaseCanalClientTest.java index 12ad9774dc..f7d40387ec 100644 --- a/example/src/main/java/com/alibaba/otter/canal/example/BaseCanalClientTest.java +++ b/example/src/main/java/com/alibaba/otter/canal/example/BaseCanalClientTest.java @@ -13,26 +13,19 @@ import com.alibaba.otter.canal.client.CanalConnector; import com.alibaba.otter.canal.protocol.CanalEntry; -import com.alibaba.otter.canal.protocol.CanalEntry.Column; -import com.alibaba.otter.canal.protocol.CanalEntry.Entry; -import com.alibaba.otter.canal.protocol.CanalEntry.EntryType; -import com.alibaba.otter.canal.protocol.CanalEntry.EventType; -import com.alibaba.otter.canal.protocol.CanalEntry.Pair; -import com.alibaba.otter.canal.protocol.CanalEntry.RowChange; -import com.alibaba.otter.canal.protocol.CanalEntry.RowData; -import com.alibaba.otter.canal.protocol.CanalEntry.TransactionBegin; -import com.alibaba.otter.canal.protocol.CanalEntry.TransactionEnd; import com.alibaba.otter.canal.protocol.Message; +import com.alibaba.otter.canal.protocol.CanalEntry.*; import com.google.protobuf.InvalidProtocolBufferException; public class BaseCanalClientTest { - protected final static Logger logger = LoggerFactory.getLogger(AbstractCanalClientTest.class); + protected final static Logger logger = LoggerFactory + 
.getLogger(AbstractCanalClientTest.class); protected static final String SEP = SystemUtils.LINE_SEPARATOR; protected static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss"; protected volatile boolean running = false; protected Thread.UncaughtExceptionHandler handler = (t, e) -> logger.error("parse events has an error", - e); + e); protected Thread thread = null; protected CanalConnector connector; protected static String context_format = null; @@ -51,8 +44,7 @@ public class BaseCanalClientTest { + "----------------> binlog[{}:{}] , name[{},{}] , eventType : {} , executeTime : {}({}) , gtid : ({}) , delay : {} ms" + SEP; - transaction_format = SEP - + "================> binlog[{}:{}] , executeTime : {}({}) , gtid : ({}) , delay : {}ms" + transaction_format = SEP + "================> binlog[{}:{}] , executeTime : {}({}) , gtid : ({}) , delay : {}ms" + SEP; } @@ -71,8 +63,8 @@ protected void printSummary(Message message, long batchId, int size) { } SimpleDateFormat format = new SimpleDateFormat(DATE_FORMAT); - logger.info(context_format, new Object[] { batchId, size, memsize, format.format(new Date()), startPosition, - endPosition }); + logger.info(context_format, + new Object[] { batchId, size, memsize, format.format(new Date()), startPosition, endPosition }); } protected String buildPositionForDump(Entry entry) { @@ -94,7 +86,8 @@ protected void printEntry(List entrys) { Date date = new Date(entry.getHeader().getExecuteTime()); SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - if (entry.getEntryType() == EntryType.TRANSACTIONBEGIN || entry.getEntryType() == EntryType.TRANSACTIONEND) { + if (entry.getEntryType() == EntryType.TRANSACTIONBEGIN + || entry.getEntryType() == EntryType.TRANSACTIONEND) { if (entry.getEntryType() == EntryType.TRANSACTIONBEGIN) { TransactionBegin begin = null; try { @@ -105,9 +98,10 @@ protected void printEntry(List entrys) { // 打印事务头信息,执行的线程id,事务耗时 logger.info(transaction_format, new Object[] { 
entry.getHeader().getLogfileName(), - String.valueOf(entry.getHeader().getLogfileOffset()), - String.valueOf(entry.getHeader().getExecuteTime()), simpleDateFormat.format(date), - entry.getHeader().getGtid(), String.valueOf(delayTime) }); + String.valueOf(entry.getHeader().getLogfileOffset()), + String.valueOf(entry.getHeader().getExecuteTime()), + simpleDateFormat.format(date), entry.getHeader().getGtid(), + String.valueOf(delayTime) }); logger.info(" BEGIN ----> Thread id: {}", begin.getThreadId()); printXAInfo(begin.getPropsList()); } else if (entry.getEntryType() == EntryType.TRANSACTIONEND) { @@ -123,9 +117,10 @@ protected void printEntry(List entrys) { printXAInfo(end.getPropsList()); logger.info(transaction_format, new Object[] { entry.getHeader().getLogfileName(), - String.valueOf(entry.getHeader().getLogfileOffset()), - String.valueOf(entry.getHeader().getExecuteTime()), simpleDateFormat.format(date), - entry.getHeader().getGtid(), String.valueOf(delayTime) }); + String.valueOf(entry.getHeader().getLogfileOffset()), + String.valueOf(entry.getHeader().getExecuteTime()), + simpleDateFormat.format(date), entry.getHeader().getGtid(), + String.valueOf(delayTime) }); } continue; @@ -143,10 +138,10 @@ protected void printEntry(List entrys) { logger.info(row_format, new Object[] { entry.getHeader().getLogfileName(), - String.valueOf(entry.getHeader().getLogfileOffset()), entry.getHeader().getSchemaName(), - entry.getHeader().getTableName(), eventType, - String.valueOf(entry.getHeader().getExecuteTime()), simpleDateFormat.format(date), - entry.getHeader().getGtid(), String.valueOf(delayTime) }); + String.valueOf(entry.getHeader().getLogfileOffset()), + entry.getHeader().getSchemaName(), entry.getHeader().getTableName(), eventType, + String.valueOf(entry.getHeader().getExecuteTime()), simpleDateFormat.format(date), + entry.getHeader().getGtid(), String.valueOf(delayTime) }); if (eventType == EventType.QUERY || rowChange.getIsDdl()) { logger.info("ddl : " + 
rowChange.getIsDdl() + " , sql ----> " + rowChange.getSql() + SEP); @@ -174,8 +169,8 @@ protected void printColumn(List columns) { if (StringUtils.containsIgnoreCase(column.getMysqlType(), "BLOB") || StringUtils.containsIgnoreCase(column.getMysqlType(), "BINARY")) { // get value bytes - builder.append(column.getName() + " : " - + new String(column.getValue().getBytes("ISO-8859-1"), "UTF-8")); + builder.append( + column.getName() + " : " + new String(column.getValue().getBytes("ISO-8859-1"), "UTF-8")); } else { builder.append(column.getName() + " : " + column.getValue()); } diff --git a/example/src/main/java/com/alibaba/otter/canal/example/SimpleCanalClientPermanceTest.java b/example/src/main/java/com/alibaba/otter/canal/example/SimpleCanalClientPermanceTest.java index 2831ad72de..311a1147f8 100644 --- a/example/src/main/java/com/alibaba/otter/canal/example/SimpleCanalClientPermanceTest.java +++ b/example/src/main/java/com/alibaba/otter/canal/example/SimpleCanalClientPermanceTest.java @@ -22,10 +22,8 @@ public static void main(String args[]) { long end = 0; final ArrayBlockingQueue queue = new ArrayBlockingQueue<>(100); try { - final CanalConnector connector = CanalConnectors.newSingleConnector(new InetSocketAddress(ip, 11111), - destination, - "canal", - "canal"); + final CanalConnector connector = CanalConnectors + .newSingleConnector(new InetSocketAddress(ip, 11111), destination, "canal", "canal"); Thread ackThread = new Thread(() -> { while (true) { diff --git a/example/src/main/java/com/alibaba/otter/canal/example/SimpleCanalClientTest.java b/example/src/main/java/com/alibaba/otter/canal/example/SimpleCanalClientTest.java index 21a25e9b96..181bfa066e 100644 --- a/example/src/main/java/com/alibaba/otter/canal/example/SimpleCanalClientTest.java +++ b/example/src/main/java/com/alibaba/otter/canal/example/SimpleCanalClientTest.java @@ -22,10 +22,8 @@ public static void main(String args[]) { // 根据ip,直接创建链接,无HA的功能 String destination = "example"; String ip = 
AddressUtils.getHostIp(); - CanalConnector connector = CanalConnectors.newSingleConnector(new InetSocketAddress(ip, 11111), - destination, - "canal", - "canal"); + CanalConnector connector = CanalConnectors + .newSingleConnector(new InetSocketAddress(ip, 11111), destination, "canal", "canal"); final SimpleCanalClientTest clientTest = new SimpleCanalClientTest(destination); clientTest.setConnector(connector); diff --git a/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientExample.java b/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientExample.java index dfc631790e..5ee497a2cc 100644 --- a/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientExample.java +++ b/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientExample.java @@ -34,7 +34,8 @@ public CanalKafkaClientExample(String zkServers, String servers, String topic, I public static void main(String[] args) { try { - final CanalKafkaClientExample kafkaCanalClientExample = new CanalKafkaClientExample(AbstractKafkaTest.zkServers, + final CanalKafkaClientExample kafkaCanalClientExample = new CanalKafkaClientExample( + AbstractKafkaTest.zkServers, AbstractKafkaTest.servers, AbstractKafkaTest.topic, AbstractKafkaTest.partition, diff --git a/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientFlatMessageExample.java b/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientFlatMessageExample.java index 03c5fe8c0c..0b2c2914c2 100644 --- a/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientFlatMessageExample.java +++ b/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientFlatMessageExample.java @@ -35,7 +35,8 @@ public CanalKafkaClientFlatMessageExample(String zkServers, String servers, Stri public static void main(String[] args) { try { - final CanalKafkaClientFlatMessageExample kafkaCanalClientExample = new 
CanalKafkaClientFlatMessageExample(AbstractKafkaTest.zkServers, + final CanalKafkaClientFlatMessageExample kafkaCanalClientExample = new CanalKafkaClientFlatMessageExample( + AbstractKafkaTest.zkServers, AbstractKafkaTest.servers, AbstractKafkaTest.topic, AbstractKafkaTest.partition, diff --git a/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaOffsetClientExample.java b/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaOffsetClientExample.java index e5732f520d..edf41ee8b5 100644 --- a/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaOffsetClientExample.java +++ b/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaOffsetClientExample.java @@ -43,7 +43,8 @@ public CanalKafkaOffsetClientExample(String servers, String topic, Integer parti public static void main(String[] args) { try { - final CanalKafkaOffsetClientExample kafkaCanalClientExample = new CanalKafkaOffsetClientExample(AbstractKafkaTest.servers, + final CanalKafkaOffsetClientExample kafkaCanalClientExample = new CanalKafkaOffsetClientExample( + AbstractKafkaTest.servers, AbstractKafkaTest.topic, AbstractKafkaTest.partition, AbstractKafkaTest.groupId); diff --git a/example/src/main/java/com/alibaba/otter/canal/example/rocketmq/CanalRocketMQClientExample.java b/example/src/main/java/com/alibaba/otter/canal/example/rocketmq/CanalRocketMQClientExample.java index b208d514a2..cd0a554335 100644 --- a/example/src/main/java/com/alibaba/otter/canal/example/rocketmq/CanalRocketMQClientExample.java +++ b/example/src/main/java/com/alibaba/otter/canal/example/rocketmq/CanalRocketMQClientExample.java @@ -1,13 +1,15 @@ package com.alibaba.otter.canal.example.rocketmq; -import com.alibaba.otter.canal.client.rocketmq.RocketMQCanalConnector; -import com.alibaba.otter.canal.protocol.Message; import java.util.List; import java.util.concurrent.TimeUnit; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
org.springframework.util.Assert; +import com.alibaba.otter.canal.client.rocketmq.RocketMQCanalConnector; +import com.alibaba.otter.canal.protocol.Message; + /** * RocketMQ client example * @@ -26,15 +28,23 @@ public class CanalRocketMQClientExample extends AbstractRocektMQTest { private Thread.UncaughtExceptionHandler handler = (t, e) -> logger.error("parse events has an error", e); - public CanalRocketMQClientExample(String nameServers, String topic, String groupId) { + public CanalRocketMQClientExample(String nameServers, String topic, String groupId){ connector = new RocketMQCanalConnector(nameServers, topic, groupId, 500, false); } public CanalRocketMQClientExample(String nameServers, String topic, String groupId, boolean enableMessageTrace, - String accessKey, String secretKey, String accessChannel, String namespace) { - connector = new RocketMQCanalConnector(nameServers, topic, groupId, accessKey, - secretKey, -1, false, enableMessageTrace, - null, accessChannel, namespace); + String accessKey, String secretKey, String accessChannel, String namespace){ + connector = new RocketMQCanalConnector(nameServers, + topic, + groupId, + accessKey, + secretKey, + -1, + false, + enableMessageTrace, + null, + accessChannel, + namespace); } public static void main(String[] args) { diff --git a/example/src/main/java/com/alibaba/otter/canal/example/rocketmq/CanalRocketMQClientFlatMessageExample.java b/example/src/main/java/com/alibaba/otter/canal/example/rocketmq/CanalRocketMQClientFlatMessageExample.java index 6f5a73c698..288e032e8b 100644 --- a/example/src/main/java/com/alibaba/otter/canal/example/rocketmq/CanalRocketMQClientFlatMessageExample.java +++ b/example/src/main/java/com/alibaba/otter/canal/example/rocketmq/CanalRocketMQClientFlatMessageExample.java @@ -18,7 +18,8 @@ */ public class CanalRocketMQClientFlatMessageExample extends AbstractRocektMQTest { - protected final static Logger logger = LoggerFactory.getLogger(CanalRocketMQClientFlatMessageExample.class); + 
protected final static Logger logger = LoggerFactory + .getLogger(CanalRocketMQClientFlatMessageExample.class); private RocketMQCanalConnector connector; @@ -34,7 +35,8 @@ public CanalRocketMQClientFlatMessageExample(String nameServers, String topic, S public static void main(String[] args) { try { - final CanalRocketMQClientFlatMessageExample rocketMQClientExample = new CanalRocketMQClientFlatMessageExample(nameServers, + final CanalRocketMQClientFlatMessageExample rocketMQClientExample = new CanalRocketMQClientFlatMessageExample( + nameServers, topic, groupId); logger.info("## Start the rocketmq consumer: {}-{}", topic, groupId); diff --git a/example/src/main/resources/client-spring.xml b/example/src/main/resources/client-spring.xml deleted file mode 100644 index 29eba1af22..0000000000 --- a/example/src/main/resources/client-spring.xml +++ /dev/null @@ -1,53 +0,0 @@ - - - - - - - - - classpath:client.properties - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/example/src/main/resources/client.properties b/example/src/main/resources/client.properties deleted file mode 100644 index c5142912a1..0000000000 --- a/example/src/main/resources/client.properties +++ /dev/null @@ -1,16 +0,0 @@ -# client 配置 -zk.servers=127.0.0.1:2181 -# 5 * 1024 -client.batch.size=5120 -client.debug=false -client.destination=example -client.username=canal -client.password=canal -client.exceptionstrategy=1 -client.retrytimes=3 -client.filter=.*\\..* - -# 同步目标: mysql 配置 -target.mysql.url=jdbc:mysql://127.0.0.1:4306 -target.mysql.username=root -target.mysql.password=123456 diff --git a/filter/pom.xml b/filter/pom.xml index c971a2081e..d5f66c6af8 100644 --- a/filter/pom.xml +++ b/filter/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter diff --git a/filter/src/main/java/com/alibaba/otter/canal/filter/aviater/AviaterRegexFilter.java 
b/filter/src/main/java/com/alibaba/otter/canal/filter/aviater/AviaterRegexFilter.java index 48b51b2514..cb20762af1 100644 --- a/filter/src/main/java/com/alibaba/otter/canal/filter/aviater/AviaterRegexFilter.java +++ b/filter/src/main/java/com/alibaba/otter/canal/filter/aviater/AviaterRegexFilter.java @@ -109,11 +109,11 @@ public int compare(String str1, String str2) { private List completionPattern(List patterns) { List result = new ArrayList<>(); for (String pattern : patterns) { - StringBuffer stringBuffer = new StringBuffer(); - stringBuffer.append("^"); - stringBuffer.append(pattern); - stringBuffer.append("$"); - result.add(stringBuffer.toString()); + StringBuilder stringBuilder = new StringBuilder(); + stringBuilder.append("^"); + stringBuilder.append(pattern); + stringBuilder.append("$"); + result.add(stringBuilder.toString()); } return result; } diff --git a/instance/core/pom.xml b/instance/core/pom.xml index 5950588de4..9184f8bdc8 100644 --- a/instance/core/pom.xml +++ b/instance/core/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../../pom.xml canal.instance.core diff --git a/instance/manager/pom.xml b/instance/manager/pom.xml index 2b9baba990..6cc524bb7e 100644 --- a/instance/manager/pom.xml +++ b/instance/manager/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../../pom.xml com.alibaba.otter diff --git a/instance/manager/src/main/java/com/alibaba/otter/canal/instance/manager/CanalInstanceWithManager.java b/instance/manager/src/main/java/com/alibaba/otter/canal/instance/manager/CanalInstanceWithManager.java index 5d1da5db0f..397b1efe25 100644 --- a/instance/manager/src/main/java/com/alibaba/otter/canal/instance/manager/CanalInstanceWithManager.java +++ b/instance/manager/src/main/java/com/alibaba/otter/canal/instance/manager/CanalInstanceWithManager.java @@ -10,6 +10,7 @@ import java.util.List; import java.util.stream.Collectors; +import com.alibaba.fastjson2.JSONObject; import 
org.apache.commons.lang.BooleanUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.exception.ExceptionUtils; @@ -17,7 +18,6 @@ import org.slf4j.LoggerFactory; import org.springframework.util.CollectionUtils; -import com.alibaba.fastjson.JSONObject; import com.alibaba.otter.canal.common.CanalException; import com.alibaba.otter.canal.common.alarm.CanalAlarmHandler; import com.alibaba.otter.canal.common.alarm.LogAlarmHandler; @@ -27,13 +27,7 @@ import com.alibaba.otter.canal.instance.core.AbstractCanalInstance; import com.alibaba.otter.canal.instance.manager.model.Canal; import com.alibaba.otter.canal.instance.manager.model.CanalParameter; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.DataSourcing; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.HAMode; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.IndexMode; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.MetaMode; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.SourcingType; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.StorageMode; -import com.alibaba.otter.canal.instance.manager.model.CanalParameter.StorageScavengeMode; +import com.alibaba.otter.canal.instance.manager.model.CanalParameter.*; import com.alibaba.otter.canal.meta.FileMixedMetaManager; import com.alibaba.otter.canal.meta.MemoryMetaManager; import com.alibaba.otter.canal.meta.PeriodMixedMetaManager; @@ -335,6 +329,7 @@ private CanalEventParser doInitEventParser(SourcingType type, List resp = JSONObject.parseObject(response, + ResponseModel resp = JSON.parseObject(response, new TypeReference>() { }); diff --git a/instance/pom.xml b/instance/pom.xml index 90992c8b02..ec2fffe165 100644 --- a/instance/pom.xml +++ b/instance/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter diff --git a/instance/spring/pom.xml 
b/instance/spring/pom.xml index 09b3a8c524..be3bc8f466 100644 --- a/instance/spring/pom.xml +++ b/instance/spring/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../../pom.xml com.alibaba.otter diff --git a/meta/pom.xml b/meta/pom.xml index c268b25d37..5d60b6e878 100644 --- a/meta/pom.xml +++ b/meta/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter diff --git a/meta/src/main/java/com/alibaba/otter/canal/meta/FileMixedMetaManager.java b/meta/src/main/java/com/alibaba/otter/canal/meta/FileMixedMetaManager.java index 53d927015f..052e923cb7 100644 --- a/meta/src/main/java/com/alibaba/otter/canal/meta/FileMixedMetaManager.java +++ b/meta/src/main/java/com/alibaba/otter/canal/meta/FileMixedMetaManager.java @@ -171,7 +171,7 @@ private FileMetaInstanceData loadDataFromFile(File dataFile) { return null; } - String json = FileUtils.readFileToString(dataFile, charset.name()); + String json = FileUtils.readFileToString(dataFile, charset); return JsonUtils.unmarshalFromString(json, FileMetaInstanceData.class); } catch (IOException e) { throw new CanalMetaManagerException(e); @@ -209,7 +209,11 @@ private void flushDataToFile(String destination, File dataFile) { data.setClientDatas(clientDatas); } - + //fixed issue https://github.com/alibaba/canal/issues/4312 + //客户端数据为空时不覆盖文件内容 (适合单客户端) + if(data.getClientDatas().isEmpty()){ + return; + } String json = JsonUtils.marshalToString(data); try { FileUtils.writeStringToFile(dataFile, json); diff --git a/meta/src/main/java/com/alibaba/otter/canal/meta/ZooKeeperMetaManager.java b/meta/src/main/java/com/alibaba/otter/canal/meta/ZooKeeperMetaManager.java index a9e65b24d7..f2d6f76036 100644 --- a/meta/src/main/java/com/alibaba/otter/canal/meta/ZooKeeperMetaManager.java +++ b/meta/src/main/java/com/alibaba/otter/canal/meta/ZooKeeperMetaManager.java @@ -12,7 +12,7 @@ import org.springframework.util.Assert; import 
org.springframework.util.CollectionUtils; -import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson2.JSONWriter; import com.alibaba.otter.canal.common.AbstractCanalLifeCycle; import com.alibaba.otter.canal.common.utils.JsonUtils; import com.alibaba.otter.canal.common.zookeeper.ZkClientx; @@ -155,7 +155,7 @@ public Position getCursor(ClientIdentity clientIdentity) throws CanalMetaManager public void updateCursor(ClientIdentity clientIdentity, Position position) throws CanalMetaManagerException { String path = ZookeeperPathUtils.getCursorPath(clientIdentity.getDestination(), clientIdentity.getClientId()); - byte[] data = JsonUtils.marshalToByte(position, SerializerFeature.WriteClassName); + byte[] data = JsonUtils.marshalToByte(position, JSONWriter.Feature.WriteClassName); try { zkClientx.writeData(path, data); } catch (ZkNoNodeException e) { @@ -166,7 +166,7 @@ public void updateCursor(ClientIdentity clientIdentity, Position position) throw public Long addBatch(ClientIdentity clientIdentity, PositionRange positionRange) throws CanalMetaManagerException { String path = ZookeeperPathUtils.getBatchMarkPath(clientIdentity.getDestination(), clientIdentity.getClientId()); - byte[] data = JsonUtils.marshalToByte(positionRange, SerializerFeature.WriteClassName); + byte[] data = JsonUtils.marshalToByte(positionRange, JSONWriter.Feature.WriteClassName); String batchPath = zkClientx .createPersistentSequential(path + ZookeeperPathUtils.ZOOKEEPER_SEPARATOR, data, true); String batchIdString = StringUtils.substringAfterLast(batchPath, ZookeeperPathUtils.ZOOKEEPER_SEPARATOR); @@ -177,7 +177,7 @@ public void addBatch(ClientIdentity clientIdentity, PositionRange positionRange, Long batchId) throws CanalMetaManagerException { String path = ZookeeperPathUtils .getBatchMarkWithIdPath(clientIdentity.getDestination(), clientIdentity.getClientId(), batchId); - byte[] data = JsonUtils.marshalToByte(positionRange, SerializerFeature.WriteClassName); + byte[] 
data = JsonUtils.marshalToByte(positionRange, JSONWriter.Feature.WriteClassName); zkClientx.createPersistent(path, data, true); } diff --git a/parse/pom.xml b/parse/pom.xml index f180d14e1a..b6c695b38f 100644 --- a/parse/pom.xml +++ b/parse/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml canal.parse diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/AbstractEventParser.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/AbstractEventParser.java index 4d6d1cc48e..53a6971f05 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/AbstractEventParser.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/AbstractEventParser.java @@ -1,5 +1,7 @@ package com.alibaba.otter.canal.parse.inbound; +import static com.alibaba.otter.canal.parse.driver.mysql.utils.GtidUtil.parseGtidSet; + import java.io.IOException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/AbstractMysqlEventParser.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/AbstractMysqlEventParser.java index 1e646f7c77..bb4985f157 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/AbstractMysqlEventParser.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/AbstractMysqlEventParser.java @@ -38,6 +38,9 @@ public abstract class AbstractMysqlEventParser extends AbstractEventParser iterateEvents = decoder.processIterateDecode(event, context); + if (!iterateEvents.isEmpty()) { + // 处理compress event + for(LogEvent itEvent : iterateEvents) { + if (!func.sink(event)) { + needContinue = false; + break; + } + } + } else { + if (!func.sink(event)) { + needContinue = false; + break; + } } } fetcher.close(); // 关闭上一个文件 parserFinish(current.getName()); if (needContinue) {// 读取下一个 - File nextFile; if (needWait) { nextFile = 
binlogs.waitForNextFile(current); diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlConnection.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlConnection.java index 6d2b73522e..d2ca3794f2 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlConnection.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlConnection.java @@ -217,8 +217,18 @@ public void dump(GTIDSet gtidSet, SinkFunction func) throws IOException { throw new CanalParseException("parse failed"); } - if (!func.sink(event)) { - break; + List iterateEvents = decoder.processIterateDecode(event, context); + if (!iterateEvents.isEmpty()) { + // 处理compress event + for(LogEvent itEvent : iterateEvents) { + if (!func.sink(event)) { + break; + } + } + } else { + if (!func.sink(event)) { + break; + } } } } @@ -344,9 +354,9 @@ public void sendSemiAck(String binlogfilename, Long binlogPosition) throws IOExc private void sendBinlogDumpGTID(GTIDSet gtidSet) throws IOException { if (isMariaDB()) { sendMariaBinlogDumpGTID(gtidSet); - return; + } else { + sendMySQLBinlogDumpGTID(gtidSet); } - sendMySQLBinlogDumpGTID(gtidSet); } private void sendMySQLBinlogDumpGTID(GTIDSet gtidSet) throws IOException { diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlEventParser.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlEventParser.java index f1dd745811..c36cd83caa 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlEventParser.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlEventParser.java @@ -73,6 +73,9 @@ public class MysqlEventParser extends AbstractMysqlEventParser implements CanalE private boolean autoResetLatestPosMode = false; // true: // binlog被删除之后,自动按最新的数据订阅 + + private boolean multiStreamEnable;//support for polardbx binlog-x + @Override protected ErosaConnection 
buildErosaConnection() { return buildMysqlConnection(this.runningInfo); @@ -353,7 +356,7 @@ private final long generateUniqueServerId() { @Override protected EntryPosition findStartPosition(ErosaConnection connection) throws IOException { if (isGTIDMode()) { - // GTID模式下,CanalLogPositionManager里取最后的gtid,没有则取instanc配置中的 + // GTID模式下,CanalLogPositionManager里取最后的gtid,没有则取instance配置中的 LogPosition logPosition = getLogPositionManager().getLatestIndexBy(destination); if (logPosition != null) { // 如果以前是非GTID模式,后来调整为了GTID模式,那么为了保持兼容,需要判断gtid是否为空 @@ -391,7 +394,7 @@ protected EntryPosition findEndPosition(ErosaConnection connection) throws IOExc protected EntryPosition findEndPositionWithMasterIdAndTimestamp(MysqlConnection connection) { MysqlConnection mysqlConnection = (MysqlConnection) connection; final EntryPosition endPosition = findEndPosition(mysqlConnection); - if (tableMetaTSDB != null) { + if (tableMetaTSDB != null || isGTIDMode()) { long startTimestamp = System.currentTimeMillis(); return findAsPerTimestampInSpecificLogFile(mysqlConnection, startTimestamp, @@ -436,7 +439,8 @@ protected EntryPosition findStartPositionInternal(ErosaConnection connection) { } if (entryPosition == null) { - entryPosition = findEndPositionWithMasterIdAndTimestamp(mysqlConnection); // 默认从当前最后一个位置进行消费 + entryPosition = + findEndPositionWithMasterIdAndTimestamp(mysqlConnection); // 默认从当前最后一个位置进行消费 } // 判断一下是否需要按时间订阅 @@ -679,10 +683,12 @@ private Long findServerId(MysqlConnection mysqlConnection) { */ private EntryPosition findEndPosition(MysqlConnection mysqlConnection) { try { - ResultSetPacket packet = mysqlConnection.query("show master status"); + String showSql = multiStreamEnable ? "show master status with " + destination : "show master status"; + ResultSetPacket packet = mysqlConnection.query(showSql); List fields = packet.getFieldValues(); if (CollectionUtils.isEmpty(fields)) { - throw new CanalParseException("command : 'show master status' has an error! pls check. 
you need (at least one of) the SUPER,REPLICATION CLIENT privilege(s) for this operation"); + throw new CanalParseException( + "command : 'show master status' has an error! pls check. you need (at least one of) the SUPER,REPLICATION CLIENT privilege(s) for this operation"); } EntryPosition endPosition = new EntryPosition(fields.get(0), Long.valueOf(fields.get(1))); if (isGTIDMode() && fields.size() > 4) { @@ -707,10 +713,13 @@ private EntryPosition findEndPosition(MysqlConnection mysqlConnection) { */ private EntryPosition findStartPosition(MysqlConnection mysqlConnection) { try { - ResultSetPacket packet = mysqlConnection.query("show binlog events limit 1"); + String showSql = multiStreamEnable ? + "show binlog events with " + destination + " limit 1" : "show binlog events limit 1"; + ResultSetPacket packet = mysqlConnection.query(showSql); List fields = packet.getFieldValues(); if (CollectionUtils.isEmpty(fields)) { - throw new CanalParseException("command : 'show binlog events limit 1' has an error! pls check. you need (at least one of) the SUPER,REPLICATION CLIENT privilege(s) for this operation"); + throw new CanalParseException( + "command : 'show binlog events limit 1' has an error! pls check. 
you need (at least one of) the SUPER,REPLICATION CLIENT privilege(s) for this operation"); } EntryPosition endPosition = new EntryPosition(fields.get(0), Long.valueOf(fields.get(1))); return endPosition; @@ -968,4 +977,8 @@ public boolean isAutoResetLatestPosMode() { public void setAutoResetLatestPosMode(boolean autoResetLatestPosMode) { this.autoResetLatestPosMode = autoResetLatestPosMode; } + + public void setMultiStreamEnable(boolean multiStreamEnable) { + this.multiStreamEnable = multiStreamEnable; + } } diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlMultiStageCoprocessor.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlMultiStageCoprocessor.java index b174328fa5..89a99d5b55 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlMultiStageCoprocessor.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/MysqlMultiStageCoprocessor.java @@ -1,8 +1,10 @@ package com.alibaba.otter.canal.parse.inbound.mysql; +import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import org.apache.commons.compress.utils.Lists; import com.alibaba.otter.canal.common.utils.NamedThreadFactory; import com.alibaba.otter.canal.parse.driver.mysql.packets.GTIDSet; import com.alibaba.otter.canal.parse.exception.CanalParseException; @@ -27,11 +29,7 @@ import com.taobao.tddl.dbsync.binlog.LogContext; import com.taobao.tddl.dbsync.binlog.LogDecoder; import com.taobao.tddl.dbsync.binlog.LogEvent; -import com.taobao.tddl.dbsync.binlog.event.DeleteRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.FormatDescriptionLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.UpdateRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.WriteRowsLogEvent; +import com.taobao.tddl.dbsync.binlog.event.*; /** * 针对解析器提供一个多阶段协同的处理 @@ -159,48 +157,71 @@ public void onEvent(MessageEvent 
event, long sequence, boolean endOfBatch) throw } int eventType = logEvent.getHeader().getType(); - TableMeta tableMeta = null; - boolean needDmlParse = false; - switch (eventType) { - case LogEvent.WRITE_ROWS_EVENT_V1: - case LogEvent.WRITE_ROWS_EVENT: - if (!filterDmlInsert) { - tableMeta = logEventConvert.parseRowsEventForTableMeta((WriteRowsLogEvent) logEvent); - needDmlParse = true; - } - break; - case LogEvent.UPDATE_ROWS_EVENT_V1: - case LogEvent.PARTIAL_UPDATE_ROWS_EVENT: - case LogEvent.UPDATE_ROWS_EVENT: - if (!filterDmlUpdate) { - tableMeta = logEventConvert.parseRowsEventForTableMeta((UpdateRowsLogEvent) logEvent); - needDmlParse = true; - } - break; - case LogEvent.DELETE_ROWS_EVENT_V1: - case LogEvent.DELETE_ROWS_EVENT: - if (!filterDmlDelete) { - tableMeta = logEventConvert.parseRowsEventForTableMeta((DeleteRowsLogEvent) logEvent); - needDmlParse = true; - } - break; - case LogEvent.ROWS_QUERY_LOG_EVENT: - needDmlParse = true; - break; - default: - CanalEntry.Entry entry = logEventConvert.parse(event.getEvent(), false); - event.setEntry(entry); + boolean needIterate = false; + + if (eventType == LogEvent.TRANSACTION_PAYLOAD_EVENT) { + // https://github.com/alibaba/canal/issues/4388 + List deLogEvents = decoder.processIterateDecode(logEvent, context); + List tableMetas = Lists.newArrayList(); + event.setNeedIterate(true); + for (LogEvent deLogEvent : deLogEvents) { + TableMeta table = processEvent(deLogEvent, event); + tableMetas.add(table); + } + event.setIterateEvents(deLogEvents); + event.setIterateTables(tableMetas); + } else { + TableMeta table = processEvent(logEvent, event); + event.setTable(table); } - - // 记录一下DML的表结构 - event.setNeedDmlParse(needDmlParse); - event.setTable(tableMeta); } catch (Throwable e) { exception = new CanalParseException(e); throw exception; } } + private TableMeta processEvent(LogEvent logEvent, MessageEvent event) { + TableMeta tableMeta = null; + boolean needDmlParse = false; + int eventType = 
logEvent.getHeader().getType(); + switch (eventType) { + case LogEvent.WRITE_ROWS_EVENT_V1: + case LogEvent.WRITE_ROWS_EVENT: + if (!filterDmlInsert) { + tableMeta = logEventConvert.parseRowsEventForTableMeta((WriteRowsLogEvent) logEvent); + needDmlParse = true; + } + break; + case LogEvent.UPDATE_ROWS_EVENT_V1: + case LogEvent.PARTIAL_UPDATE_ROWS_EVENT: + case LogEvent.UPDATE_ROWS_EVENT: + if (!filterDmlUpdate) { + tableMeta = logEventConvert.parseRowsEventForTableMeta((UpdateRowsLogEvent) logEvent); + needDmlParse = true; + } + break; + case LogEvent.DELETE_ROWS_EVENT_V1: + case LogEvent.DELETE_ROWS_EVENT: + if (!filterDmlDelete) { + tableMeta = logEventConvert.parseRowsEventForTableMeta((DeleteRowsLogEvent) logEvent); + needDmlParse = true; + } + break; + case LogEvent.ROWS_QUERY_LOG_EVENT: + needDmlParse = true; + break; + default: + CanalEntry.Entry entry = logEventConvert.parse(event.getEvent(), false); + event.setEntry(entry); + } + + // 记录一下DML的表结构 + if (needDmlParse && !event.isNeedDmlParse()) { + event.setNeedDmlParse(true); + } + return tableMeta; + } + @Override public void onStart() { @@ -218,18 +239,21 @@ private class DmlParserStage implements WorkHandler, LifecycleAwar public void onEvent(MessageEvent event) throws Exception { try { if (event.isNeedDmlParse()) { - int eventType = event.getEvent().getHeader().getType(); - CanalEntry.Entry entry = null; - switch (eventType) { - case LogEvent.ROWS_QUERY_LOG_EVENT: - entry = logEventConvert.parse(event.getEvent(), false); - break; - default: - // 单独解析dml事件 - entry = logEventConvert.parseRowsEvent((RowsLogEvent) event.getEvent(), event.getTable()); + if (event.isNeedIterate()) { + // compress binlog + List entrys = Lists.newArrayList(); + for (int index = 0; index < event.getIterateEvents().size(); index++) { + CanalEntry.Entry entry = processEvent(event.getIterateEvents().get(index), + event.getIterateTables().get(index)); + if (entry != null) { + entrys.add(entry); + } + } + 
event.setIterateEntrys(entrys); + } else { + CanalEntry.Entry entry = processEvent(event.getEvent(), event.getTable()); + event.setEntry(entry); } - - event.setEntry(entry); } } catch (Throwable e) { exception = new CanalParseException(e); @@ -237,6 +261,29 @@ public void onEvent(MessageEvent event) throws Exception { } } + private CanalEntry.Entry processEvent(LogEvent logEvent, TableMeta table) { + int eventType = logEvent.getHeader().getType(); + CanalEntry.Entry entry = null; + switch (eventType) { + case LogEvent.WRITE_ROWS_EVENT_V1: + case LogEvent.WRITE_ROWS_EVENT: + case LogEvent.UPDATE_ROWS_EVENT_V1: + case LogEvent.PARTIAL_UPDATE_ROWS_EVENT: + case LogEvent.UPDATE_ROWS_EVENT: + case LogEvent.DELETE_ROWS_EVENT_V1: + case LogEvent.DELETE_ROWS_EVENT: + // 单独解析dml事件 + entry = logEventConvert.parseRowsEvent((RowsLogEvent) logEvent, table); + break; + default: + // 如果出现compress binlog,会出现其他的event type类型 + entry = logEventConvert.parse(logEvent, false); + break; + } + + return entry; + } + @Override public void onStart() { @@ -253,8 +300,15 @@ private class SinkStoreStage implements EventHandler, LifecycleAwa @Override public void onEvent(MessageEvent event, long sequence, boolean endOfBatch) throws Exception { try { - if (event.getEntry() != null) { - transactionBuffer.add(event.getEntry()); + if (event.isNeedIterate()) { + // compress binlog + for (CanalEntry.Entry entry : event.getIterateEntrys()) { + transactionBuffer.add(entry); + } + } else { + if (event.getEntry() != null) { + transactionBuffer.add(event.getEntry()); + } } LogEvent logEvent = event.getEvent(); @@ -269,7 +323,12 @@ public void onEvent(MessageEvent event, long sequence, boolean endOfBatch) throw event.setEvent(null); event.setTable(null); event.setEntry(null); + // clear compress binlog events event.setNeedDmlParse(false); + event.setNeedIterate(false); + event.setIterateEntrys(null); + event.setIterateTables(null); + event.setIterateEvents(null); } catch (Throwable e) { exception = new 
CanalParseException(e); throw exception; @@ -294,6 +353,11 @@ static class MessageEvent { private boolean needDmlParse = false; private TableMeta table; private LogEvent event; + private boolean needIterate = false; + // compress binlog + private List iterateEvents; + private List iterateTables; + private List iterateEntrys; public LogBuffer getBuffer() { return buffer; @@ -335,6 +399,37 @@ public void setTable(TableMeta table) { this.table = table; } + public boolean isNeedIterate() { + return needIterate; + } + + public void setNeedIterate(boolean needIterate) { + this.needIterate = needIterate; + } + + public List getIterateEvents() { + return iterateEvents; + } + + public List getIterateTables() { + return iterateTables; + } + + public void setIterateEvents(List iterateEvents) { + this.iterateEvents = iterateEvents; + } + + public void setIterateTables(List iterateTables) { + this.iterateTables = iterateTables; + } + + public List getIterateEntrys() { + return iterateEntrys; + } + + public void setIterateEntrys(List iterateEntrys) { + this.iterateEntrys = iterateEntrys; + } } static class MessageEventFactory implements EventFactory { diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/dbsync/LogEventConvert.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/dbsync/LogEventConvert.java index 9949011d3e..bf8cfc8d68 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/dbsync/LogEventConvert.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/dbsync/LogEventConvert.java @@ -10,6 +10,7 @@ import java.util.BitSet; import java.util.List; +import com.taobao.tddl.dbsync.binlog.event.*; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.slf4j.Logger; @@ -36,23 +37,7 @@ import com.alibaba.otter.canal.protocol.position.EntryPosition; import com.google.protobuf.ByteString; import com.taobao.tddl.dbsync.binlog.LogEvent; 
-import com.taobao.tddl.dbsync.binlog.event.DeleteRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.GtidLogEvent; -import com.taobao.tddl.dbsync.binlog.event.HeartbeatLogEvent; -import com.taobao.tddl.dbsync.binlog.event.IntvarLogEvent; -import com.taobao.tddl.dbsync.binlog.event.LogHeader; -import com.taobao.tddl.dbsync.binlog.event.QueryLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RandLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RowsLogBuffer; -import com.taobao.tddl.dbsync.binlog.event.RowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RowsQueryLogEvent; -import com.taobao.tddl.dbsync.binlog.event.TableMapLogEvent; import com.taobao.tddl.dbsync.binlog.event.TableMapLogEvent.ColumnInfo; -import com.taobao.tddl.dbsync.binlog.event.UnknownLogEvent; -import com.taobao.tddl.dbsync.binlog.event.UpdateRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.UserVarLogEvent; -import com.taobao.tddl.dbsync.binlog.event.WriteRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.XidLogEvent; import com.taobao.tddl.dbsync.binlog.event.mariadb.AnnotateRowsEvent; import com.taobao.tddl.dbsync.binlog.event.mariadb.MariaGtidListLogEvent; import com.taobao.tddl.dbsync.binlog.event.mariadb.MariaGtidLogEvent; @@ -132,6 +117,8 @@ public Entry parse(LogEvent logEvent, boolean isSeek) throws CanalParseException return parseGTIDLogEvent((GtidLogEvent) logEvent); case LogEvent.HEARTBEAT_LOG_EVENT: return parseHeartbeatLogEvent((HeartbeatLogEvent) logEvent); + case LogEvent.HEARTBEAT_LOG_EVENT_V2: + return parseHeartbeatV2LogEvent((HeartbeatV2LogEvent) logEvent); case LogEvent.GTID_EVENT: case LogEvent.GTID_LIST_EVENT: return parseMariaGTIDLogEvent(logEvent); @@ -159,13 +146,22 @@ private Entry parseHeartbeatLogEvent(HeartbeatLogEvent logEvent) { return entryBuilder.build(); } + private Entry parseHeartbeatV2LogEvent(HeartbeatV2LogEvent logEvent) { + Header.Builder headerBuilder = Header.newBuilder(); + 
headerBuilder.setEventType(EventType.MHEARTBEAT); + Entry.Builder entryBuilder = Entry.newBuilder(); + entryBuilder.setHeader(headerBuilder.build()); + entryBuilder.setEntryType(EntryType.HEARTBEAT); + return entryBuilder.build(); + } + private Entry parseGTIDLogEvent(GtidLogEvent logEvent) { LogHeader logHeader = logEvent.getHeader(); Pair.Builder builder = Pair.newBuilder(); builder.setKey("gtid"); builder.setValue(logEvent.getGtidStr()); - if (logEvent.getLastCommitted() != null) { + if (logEvent.getLastCommitted() != -1) { builder.setKey("lastCommitted"); builder.setValue(String.valueOf(logEvent.getLastCommitted())); builder.setKey("sequenceNumber"); @@ -272,12 +268,18 @@ private Entry parseQueryEvent(QueryLogEvent event, boolean isSeek) { boolean isDml = (type == EventType.INSERT || type == EventType.UPDATE || type == EventType.DELETE); + // filterQueryDdl=true的情况下,也得更新tablemeta if (!isSeek && !isDml) { // 使用新的表结构元数据管理方式 EntryPosition position = createPosition(event.getHeader()); tableMetaCache.apply(position, event.getDbName(), queryString, null); } + if (filterQueryDdl) { + // 全部DDL过滤,那就忽略事件生成 + return null; + } + Header header = createHeader(event.getHeader(), schemaName, tableName, type); RowChange.Builder rowChangeBuilder = RowChange.newBuilder(); rowChangeBuilder.setIsDdl(!isDml); @@ -326,12 +328,8 @@ private boolean processFilter(String queryString, DdlResult result) { || result.getType() == EventType.RENAME || result.getType() == EventType.CINDEX || result.getType() == EventType.DINDEX) { // 针对DDL类型 - if (filterQueryDdl) { - return true; - } - - if (StringUtils.isEmpty(tableName) - || (result.getType() == EventType.RENAME && StringUtils.isEmpty(result.getOriTableName()))) { + if (!filterQueryDdl && (StringUtils.isEmpty(tableName) + || (result.getType() == EventType.RENAME && StringUtils.isEmpty(result.getOriTableName())))) { // 如果解析不出tableName,记录一下日志,方便bugfix,目前直接抛出异常,中断解析 throw new CanalParseException("SimpleDdlParser process query failed. 
pls submit issue with this queryString: " + queryString + " , and DdlResult: " + result.toString()); @@ -543,7 +541,7 @@ public Entry parseRowsEvent(RowsLogEvent event, TableMeta tableMeta) { rowChangeBuider.setIsDdl(false); rowChangeBuider.setEventType(eventType); - RowsLogBuffer buffer = event.getRowsBuf(charset.name()); + RowsLogBuffer buffer = event.getRowsBuf(charset); BitSet columns = event.getColumns(); BitSet changeColumns = event.getChangeColumns(); @@ -747,6 +745,23 @@ private boolean parseOneRow(RowData.Builder rowDataBuilder, RowsLogEvent event, int javaType = buffer.getJavaType(); if (buffer.isNull()) { columnBuilder.setIsNull(true); + + // 处理各种类型 + switch (javaType) { + case Types.BINARY: + case Types.VARBINARY: + case Types.LONGVARBINARY: + + // https://github.com/alibaba/canal/issues/4652 + // mysql binlog中blob/text都处理为blob类型,需要反查table + // meta,按编码解析text + if (fieldMeta != null && isText(fieldMeta.getColumnType())) { + javaType = Types.CLOB; + } else { + javaType = Types.BLOB; + } + break; + } } else { final Serializable value = buffer.getValue(); // 处理各种类型 diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/dbsync/TableMetaCache.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/dbsync/TableMetaCache.java index 24615f9eb1..a7409b999b 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/dbsync/TableMetaCache.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/dbsync/TableMetaCache.java @@ -31,12 +31,12 @@ */ public class TableMetaCache { - public static final String COLUMN_NAME = "COLUMN_NAME"; - public static final String COLUMN_TYPE = "COLUMN_TYPE"; - public static final String IS_NULLABLE = "IS_NULLABLE"; - public static final String COLUMN_KEY = "COLUMN_KEY"; - public static final String COLUMN_DEFAULT = "COLUMN_DEFAULT"; - public static final String EXTRA = "EXTRA"; + public static final String COLUMN_NAME = "field"; + public static final String 
COLUMN_TYPE = "type"; + public static final String IS_NULLABLE = "null"; + public static final String COLUMN_KEY = "key"; + public static final String COLUMN_DEFAULT = "default"; + public static final String EXTRA = "extra"; private MysqlConnection connection; private boolean isOnRDS = false; private boolean isOnPolarX = false; @@ -125,7 +125,7 @@ public static List parseTableMetaByDesc(ResultSetPacket packet) { Map nameMaps = new HashMap<>(6, 1f); int index = 0; for (FieldPacket fieldPacket : packet.getFieldDescriptors()) { - nameMaps.put(fieldPacket.getOriginalName(), index++); + nameMaps.put(StringUtils.lowerCase(fieldPacket.getName()), index++); } int size = packet.getFieldDescriptors().size(); diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/ddl/DdlResult.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/ddl/DdlResult.java index 9a7a5bb561..889b359adf 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/ddl/DdlResult.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/ddl/DdlResult.java @@ -100,7 +100,7 @@ public DdlResult clone() { @Override public String toString() { DdlResult ddlResult = this; - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); do { sb.append(String.format("DdlResult [schemaName=%s , tableName=%s , oriSchemaName=%s , oriTableName=%s , type=%s ];", ddlResult.schemaName, diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/rds/BinlogDownloadQueue.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/rds/BinlogDownloadQueue.java index 0b0c36f4d3..807ffe4924 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/rds/BinlogDownloadQueue.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/rds/BinlogDownloadQueue.java @@ -1,12 +1,6 @@ package com.alibaba.otter.canal.parse.inbound.mysql.rds; -import 
io.netty.handler.codec.http.HttpResponseStatus; - -import java.io.BufferedOutputStream; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; +import java.io.*; import java.util.Comparator; import java.util.Iterator; import java.util.LinkedList; @@ -22,6 +16,7 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; +import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.HttpGet; @@ -40,6 +35,8 @@ import com.alibaba.otter.canal.parse.exception.CanalParseException; import com.alibaba.otter.canal.parse.inbound.mysql.rds.data.BinlogFile; +import io.netty.handler.codec.http.HttpResponseStatus; + /** * @author chengjin.lyf on 2018/8/7 下午3:10 * @since 1.0.25 @@ -209,7 +206,9 @@ private void download(BinlogFile binlogFile) throws Throwable { private static void saveFile(File parentFile, String fileName, HttpResponse response) throws IOException { InputStream is = response.getEntity().getContent(); - long totalSize = Long.parseLong(response.getFirstHeader("Content-Length").getValue()); + boolean isChunked = response.getEntity().isChunked(); + Header contentLengthHeader = response.getFirstHeader("Content-Length"); + long totalSize = (isChunked || contentLengthHeader == null) ? 
0 : Long.parseLong(contentLengthHeader.getValue()); if (response.getFirstHeader("Content-Disposition") != null) { fileName = response.getFirstHeader("Content-Disposition").getValue(); fileName = StringUtils.substringAfter(fileName, "filename="); @@ -262,11 +261,13 @@ private static void saveFile(File parentFile, String fileName, HttpResponse resp while ((len = is.read(buffer)) != -1) { fos.write(buffer, 0, len); copySize += len; - long progress = copySize * 100 / totalSize; - if (progress >= nextPrintProgress) { - logger.info("download " + file.getName() + " progress : " + progress + if (totalSize > 0){ + long progress = copySize * 100 / totalSize; + if (progress >= nextPrintProgress) { + logger.info("download " + file.getName() + " progress : " + progress + "% , download size : " + copySize + ", total size : " + totalSize); - nextPrintProgress += 10; + nextPrintProgress += 10; + } } } logger.info("download file " + file.getName() + " end!"); diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/rds/request/DescribeBackupPolicyRequest.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/rds/request/DescribeBackupPolicyRequest.java index cf51961aad..0578078c93 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/rds/request/DescribeBackupPolicyRequest.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/rds/request/DescribeBackupPolicyRequest.java @@ -3,8 +3,8 @@ import org.apache.http.HttpResponse; import org.apache.http.util.EntityUtils; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONObject; import com.alibaba.otter.canal.parse.inbound.mysql.rds.data.RdsBackupPolicy; /** diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/rds/request/DescribeBinlogFilesRequest.java 
b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/rds/request/DescribeBinlogFilesRequest.java index 8aa9056960..dfa3f13a16 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/rds/request/DescribeBinlogFilesRequest.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/rds/request/DescribeBinlogFilesRequest.java @@ -2,11 +2,11 @@ import java.util.Date; +import com.alibaba.fastjson2.JSON; import org.apache.http.HttpResponse; import org.apache.http.util.EntityUtils; -import com.alibaba.fastjson.JSONObject; -import com.alibaba.fastjson.TypeReference; +import com.alibaba.fastjson2.TypeReference; import com.alibaba.otter.canal.parse.inbound.mysql.rds.data.DescribeBinlogFileResult; /** @@ -47,7 +47,7 @@ public void setResourceOwnerId(Long resourceOwnerId) { @Override protected DescribeBinlogFileResult processResult(HttpResponse response) throws Exception { String result = EntityUtils.toString(response.getEntity()); - DescribeBinlogFileResult describeBinlogFileResult = JSONObject.parseObject(result, + DescribeBinlogFileResult describeBinlogFileResult = JSON.parseObject(result, new TypeReference() { }); return describeBinlogFileResult; diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/DatabaseTableMeta.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/DatabaseTableMeta.java index e61bc601a7..0a2eb20e97 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/DatabaseTableMeta.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/DatabaseTableMeta.java @@ -14,6 +14,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.regex.Pattern; +import com.alibaba.otter.canal.parse.driver.mysql.packets.server.FieldPacket; import org.apache.commons.beanutils.BeanUtils; import org.apache.commons.lang.ObjectUtils; import org.apache.commons.lang.StringUtils; @@ -22,8 +23,8 @@ import org.slf4j.MDC; 
import com.alibaba.druid.sql.repository.Schema; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONObject; import com.alibaba.otter.canal.filter.CanalEventFilter; import com.alibaba.otter.canal.parse.driver.mysql.packets.server.ResultSetPacket; import com.alibaba.otter.canal.parse.exception.CanalParseException; @@ -190,17 +191,29 @@ public Map snapshot() { private boolean dumpTableMeta(MysqlConnection connection, final CanalEventFilter filter) { try { ResultSetPacket packet = connection.query("show databases"); + int columnSize = packet.getFieldDescriptors().size(); + int columnIndex = 0; + for (; columnIndex < columnSize; columnIndex++) { + FieldPacket value = packet.getFieldDescriptors().get(columnIndex); + if (StringUtils.equalsIgnoreCase(value.getName(), "Database")) { + break; + } + } + List schemas = new ArrayList<>(); - schemas.addAll(packet.getFieldValues()); + for (int line = 0; line < packet.getFieldValues().size() / columnSize; line++) { + String schema = packet.getFieldValues().get(line * columnSize + columnIndex); + schemas.add(schema); + } for (String schema : schemas) { // filter views packet = connection.query("show full tables from `" + schema + "` where Table_type = 'BASE TABLE'"); + columnSize = packet.getFieldDescriptors().size(); + int tableNameColumnIndex = 0; // default index is 0 List tables = new ArrayList<>(); - for (String table : packet.getFieldValues()) { - if ("BASE TABLE".equalsIgnoreCase(table)) { - continue; - } + for (int line = 0; line < packet.getFieldValues().size() / columnSize; line++) { + String table = packet.getFieldValues().get(line * columnSize + tableNameColumnIndex); String fullName = schema + "." 
+ table; if (blackFilter == null || !blackFilter.filter(fullName)) { if (filter == null || filter.filter(fullName)) { diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/MemoryTableMeta.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/MemoryTableMeta.java index 55aa702dc1..1d27f3704b 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/MemoryTableMeta.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/MemoryTableMeta.java @@ -80,7 +80,8 @@ public boolean apply(EntryPosition position, String schema, String ddl, String e && !StringUtils.startsWithIgnoreCase(StringUtils.trim(ddl), "create user") && !StringUtils.startsWithIgnoreCase(StringUtils.trim(ddl), "alter user") && !StringUtils.startsWithIgnoreCase(StringUtils.trim(ddl), "drop user") - && !StringUtils.startsWithIgnoreCase(StringUtils.trim(ddl), "create database")) { + && !StringUtils.startsWithIgnoreCase(StringUtils.trim(ddl), "create database") + && !StringUtils.startsWithIgnoreCase(StringUtils.trim(ddl), "alter schema")) { repository.console(ddl); } } catch (Throwable e) { @@ -145,7 +146,7 @@ public boolean rollback(EntryPosition position) { public Map snapshot() { Map schemaDdls = new HashMap<>(); for (Schema schema : repository.getSchemas()) { - StringBuffer data = new StringBuffer(4 * 1024); + StringBuilder data = new StringBuilder(4 * 1024); for (String table : schema.showTables()) { SchemaObject schemaObject = schema.findTable(table); schemaObject.getStatement().output(data); diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/oceanbase/logproxy/LogProxyMessageParser.java b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/oceanbase/logproxy/LogProxyMessageParser.java index 6fcca377f1..356b16e1e7 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/oceanbase/logproxy/LogProxyMessageParser.java +++ 
b/parse/src/main/java/com/alibaba/otter/canal/parse/inbound/oceanbase/logproxy/LogProxyMessageParser.java @@ -11,7 +11,7 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicLong; -import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson2.JSONObject; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/parse/src/main/java/com/alibaba/otter/canal/parse/index/FileMixedLogPositionManager.java b/parse/src/main/java/com/alibaba/otter/canal/parse/index/FileMixedLogPositionManager.java index afac6cb273..011387008c 100644 --- a/parse/src/main/java/com/alibaba/otter/canal/parse/index/FileMixedLogPositionManager.java +++ b/parse/src/main/java/com/alibaba/otter/canal/parse/index/FileMixedLogPositionManager.java @@ -183,7 +183,7 @@ private LogPosition loadDataFromFile(File dataFile) { return null; } - String json = FileUtils.readFileToString(dataFile, charset.name()); + String json = FileUtils.readFileToString(dataFile, charset); return JsonUtils.unmarshalFromString(json, LogPosition.class); } catch (IOException e) { throw new CanalMetaManagerException(e); diff --git a/parse/src/test/java/com/alibaba/otter/canal/parse/DirectLogFetcherTest.java b/parse/src/test/java/com/alibaba/otter/canal/parse/DirectLogFetcherTest.java index 267c0d2243..2a5aac8e6a 100644 --- a/parse/src/test/java/com/alibaba/otter/canal/parse/DirectLogFetcherTest.java +++ b/parse/src/test/java/com/alibaba/otter/canal/parse/DirectLogFetcherTest.java @@ -13,6 +13,7 @@ import org.apache.commons.lang.StringUtils; import org.junit.Assert; +import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,20 +32,12 @@ import com.taobao.tddl.dbsync.binlog.LogContext; import com.taobao.tddl.dbsync.binlog.LogDecoder; import com.taobao.tddl.dbsync.binlog.LogEvent; -import com.taobao.tddl.dbsync.binlog.event.DeleteRowsLogEvent; -import 
com.taobao.tddl.dbsync.binlog.event.FormatDescriptionLogEvent; -import com.taobao.tddl.dbsync.binlog.event.QueryLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RotateLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RowsLogBuffer; -import com.taobao.tddl.dbsync.binlog.event.RowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RowsQueryLogEvent; -import com.taobao.tddl.dbsync.binlog.event.TableMapLogEvent; +import com.taobao.tddl.dbsync.binlog.event.*; import com.taobao.tddl.dbsync.binlog.event.TableMapLogEvent.ColumnInfo; -import com.taobao.tddl.dbsync.binlog.event.UpdateRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.WriteRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.XidLogEvent; import com.taobao.tddl.dbsync.binlog.event.mariadb.AnnotateRowsEvent; +import com.taobao.tddl.dbsync.binlog.event.mariadb.BinlogCheckPointLogEvent; +@Ignore public class DirectLogFetcherTest { protected final Logger logger = LoggerFactory.getLogger(this.getClass()); @@ -61,7 +54,7 @@ public void testSimple() { updateSettings(connector); loadBinlogChecksum(connector); sendRegisterSlave(connector, 3); - sendBinlogDump(connector, "mysql-bin.000001", 4L, 3); + sendBinlogDump(connector, "mysql-bin.000002", 4L, 3); fetcher.start(connector.getChannel()); @@ -76,45 +69,9 @@ public void testSimple() { if (event == null) { throw new RuntimeException("parse failed"); } - - int eventType = event.getHeader().getType(); - switch (eventType) { - case LogEvent.ROTATE_EVENT: - // binlogFileName = ((RotateLogEvent) - // event).getFilename(); - System.out.println(((RotateLogEvent) event).getFilename()); - break; - case LogEvent.TABLE_MAP_EVENT: - parseTableMapEvent((TableMapLogEvent) event); - break; - case LogEvent.WRITE_ROWS_EVENT_V1: - case LogEvent.WRITE_ROWS_EVENT: - parseRowsEvent((WriteRowsLogEvent) event); - break; - case LogEvent.UPDATE_ROWS_EVENT_V1: - case LogEvent.PARTIAL_UPDATE_ROWS_EVENT: - case LogEvent.UPDATE_ROWS_EVENT: - 
parseRowsEvent((UpdateRowsLogEvent) event); - break; - case LogEvent.DELETE_ROWS_EVENT_V1: - case LogEvent.DELETE_ROWS_EVENT: - parseRowsEvent((DeleteRowsLogEvent) event); - break; - case LogEvent.QUERY_EVENT: - parseQueryEvent((QueryLogEvent) event); - break; - case LogEvent.ROWS_QUERY_LOG_EVENT: - parseRowsQueryEvent((RowsQueryLogEvent) event); - break; - case LogEvent.ANNOTATE_ROWS_EVENT: - break; - case LogEvent.XID_EVENT: - break; - default: - break; - } + processEvent(event, decoder, context); } - } catch (Exception e) { + } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } finally { @@ -127,6 +84,56 @@ public void testSimple() { } + private void processEvent(LogEvent event, LogDecoder decoder, LogContext context) throws Throwable { + int eventType = event.getHeader().getType(); + switch (eventType) { + case LogEvent.ROTATE_EVENT: + // binlogFileName = ((RotateLogEvent) + // event).getFilename(); + System.out.println("RotateLogEvent : " + ((RotateLogEvent) event).getFilename()); + break; + case LogEvent.BINLOG_CHECKPOINT_EVENT: + // binlogFileName = ((BinlogCheckPointLogEvent) + // event).getFilename(); + System.out.println("BinlogCheckPointLogEvent : " + ((BinlogCheckPointLogEvent) event).getFilename()); + break; + case LogEvent.TABLE_MAP_EVENT: + parseTableMapEvent((TableMapLogEvent) event); + break; + case LogEvent.WRITE_ROWS_EVENT_V1: + case LogEvent.WRITE_ROWS_EVENT: + parseRowsEvent((WriteRowsLogEvent) event); + break; + case LogEvent.UPDATE_ROWS_EVENT_V1: + case LogEvent.PARTIAL_UPDATE_ROWS_EVENT: + case LogEvent.UPDATE_ROWS_EVENT: + parseRowsEvent((UpdateRowsLogEvent) event); + break; + case LogEvent.DELETE_ROWS_EVENT_V1: + case LogEvent.DELETE_ROWS_EVENT: + parseRowsEvent((DeleteRowsLogEvent) event); + break; + case LogEvent.QUERY_EVENT: + parseQueryEvent((QueryLogEvent) event); + break; + case LogEvent.ROWS_QUERY_LOG_EVENT: + parseRowsQueryEvent((RowsQueryLogEvent) event); + break; + case LogEvent.ANNOTATE_ROWS_EVENT: + 
break; + case LogEvent.XID_EVENT: + break; + case LogEvent.TRANSACTION_PAYLOAD_EVENT: + List events = decoder.processIterateDecode(event, context); + for (LogEvent deEvent : events) { + processEvent(deEvent, decoder, context); + } + break; + default: + break; + } + } + private void sendRegisterSlave(MysqlConnector connector, int slaveId) throws IOException { RegisterSlaveCommandPacket cmd = new RegisterSlaveCommandPacket(); cmd.reportHost = connector.getAddress().getAddress().getHostAddress(); @@ -300,7 +307,7 @@ protected void parseRowsEvent(RowsLogEvent event) { event.getHeader().getLogPos() - event.getHeader().getEventLen(), event.getTable().getDbName(), event.getTable().getTableName())); - RowsLogBuffer buffer = event.getRowsBuf(charset.name()); + RowsLogBuffer buffer = event.getRowsBuf(charset); BitSet columns = event.getColumns(); BitSet changeColumns = event.getChangeColumns(); while (buffer.nextOneRow(columns)) { diff --git a/parse/src/test/java/com/alibaba/otter/canal/parse/MysqlBinlogDumpPerformanceTest.java b/parse/src/test/java/com/alibaba/otter/canal/parse/MysqlBinlogDumpPerformanceTest.java index e6214bd627..d8957d6257 100644 --- a/parse/src/test/java/com/alibaba/otter/canal/parse/MysqlBinlogDumpPerformanceTest.java +++ b/parse/src/test/java/com/alibaba/otter/canal/parse/MysqlBinlogDumpPerformanceTest.java @@ -22,12 +22,12 @@ public class MysqlBinlogDumpPerformanceTest { public static void main(String args[]) { final MysqlEventParser controller = new MysqlEventParser(); - final EntryPosition startPosition = new EntryPosition("mysql-bin.000007", 89796293L, 100L); + final EntryPosition startPosition = new EntryPosition("binlog.000002", 4L, 100L); controller.setConnectionCharset("UTF-8"); controller.setSlaveId(3344L); controller.setDetectingEnable(false); controller.setFilterQueryDml(true); - controller.setMasterInfo(new AuthenticationInfo(new InetSocketAddress("100.81.154.142", 3306), "canal", "canal")); + controller.setMasterInfo(new 
AuthenticationInfo(new InetSocketAddress("127.0.0.1", 3306), "canal", "canal")); controller.setMasterPosition(startPosition); controller.setEnableTsdb(false); controller.setDestination("example"); diff --git a/parse/src/test/java/com/alibaba/otter/canal/parse/MysqlBinlogParsePerformanceTest.java b/parse/src/test/java/com/alibaba/otter/canal/parse/MysqlBinlogParsePerformanceTest.java index 58a9a9a465..390d9e9587 100644 --- a/parse/src/test/java/com/alibaba/otter/canal/parse/MysqlBinlogParsePerformanceTest.java +++ b/parse/src/test/java/com/alibaba/otter/canal/parse/MysqlBinlogParsePerformanceTest.java @@ -5,10 +5,13 @@ import java.net.InetSocketAddress; import java.nio.charset.Charset; import java.util.BitSet; +import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.atomic.AtomicLong; +import org.junit.Ignore; + import com.alibaba.otter.canal.parse.driver.mysql.MysqlConnector; import com.alibaba.otter.canal.parse.driver.mysql.MysqlUpdateExecutor; import com.alibaba.otter.canal.parse.driver.mysql.packets.HeaderPacket; @@ -19,14 +22,8 @@ import com.taobao.tddl.dbsync.binlog.LogContext; import com.taobao.tddl.dbsync.binlog.LogDecoder; import com.taobao.tddl.dbsync.binlog.LogEvent; -import com.taobao.tddl.dbsync.binlog.event.DeleteRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.RowsLogBuffer; -import com.taobao.tddl.dbsync.binlog.event.RowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.TableMapLogEvent; +import com.taobao.tddl.dbsync.binlog.event.*; import com.taobao.tddl.dbsync.binlog.event.TableMapLogEvent.ColumnInfo; -import com.taobao.tddl.dbsync.binlog.event.UpdateRowsLogEvent; -import com.taobao.tddl.dbsync.binlog.event.WriteRowsLogEvent; -import org.junit.Ignore; @Ignore public class MysqlBinlogParsePerformanceTest { @@ -44,7 +41,7 @@ public static void main(String args[]) { Thread thread = new Thread(() -> { try { consumer(buffer); - } catch (IOException | 
InterruptedException e) { + } catch (Throwable e) { e.printStackTrace(); } }); @@ -59,7 +56,7 @@ public static void main(String args[]) { } } - public static void consumer(BlockingQueue buffer) throws IOException, InterruptedException { + public static void consumer(BlockingQueue buffer) throws Throwable { LogDecoder decoder = new LogDecoder(LogEvent.UNKNOWN_EVENT, LogEvent.ENUM_END_EVENT); LogContext context = new LogContext(); @@ -70,33 +67,7 @@ public static void consumer(BlockingQueue buffer) throws IOException, while (true) { LogEvent event = null; event = decoder.decode(buffer.take(), context); - int eventType = event.getHeader().getType(); - switch (eventType) { - case LogEvent.ROTATE_EVENT: - break; - case LogEvent.WRITE_ROWS_EVENT_V1: - case LogEvent.WRITE_ROWS_EVENT: - parseRowsEvent((WriteRowsLogEvent) event, sum); - break; - case LogEvent.UPDATE_ROWS_EVENT_V1: - case LogEvent.PARTIAL_UPDATE_ROWS_EVENT: - case LogEvent.UPDATE_ROWS_EVENT: - parseRowsEvent((UpdateRowsLogEvent) event, sum); - break; - case LogEvent.DELETE_ROWS_EVENT_V1: - case LogEvent.DELETE_ROWS_EVENT: - parseRowsEvent((DeleteRowsLogEvent) event, sum); - break; - case LogEvent.XID_EVENT: - sum.incrementAndGet(); - break; - case LogEvent.QUERY_EVENT: - sum.incrementAndGet(); - break; - default: - break; - } - + processEvent(event, decoder, context, sum); long current = sum.get(); if (current - last >= 100000) { end = System.currentTimeMillis(); @@ -108,6 +79,41 @@ public static void consumer(BlockingQueue buffer) throws IOException, } } + private static void processEvent(LogEvent event, LogDecoder decoder, LogContext context, AtomicLong sum) throws Throwable { + int eventType = event.getHeader().getType(); + switch (eventType) { + case LogEvent.ROTATE_EVENT: + break; + case LogEvent.WRITE_ROWS_EVENT_V1: + case LogEvent.WRITE_ROWS_EVENT: + parseRowsEvent((WriteRowsLogEvent) event, sum); + break; + case LogEvent.UPDATE_ROWS_EVENT_V1: + case LogEvent.PARTIAL_UPDATE_ROWS_EVENT: + case 
LogEvent.UPDATE_ROWS_EVENT: + parseRowsEvent((UpdateRowsLogEvent) event, sum); + break; + case LogEvent.DELETE_ROWS_EVENT_V1: + case LogEvent.DELETE_ROWS_EVENT: + parseRowsEvent((DeleteRowsLogEvent) event, sum); + break; + case LogEvent.XID_EVENT: + sum.incrementAndGet(); + break; + case LogEvent.QUERY_EVENT: + sum.incrementAndGet(); + break; + case LogEvent.TRANSACTION_PAYLOAD_EVENT: + List events = decoder.processIterateDecode(event, context); + for (LogEvent deEvent : events) { + processEvent(deEvent, decoder, context, sum); + } + break; + default: + break; + } + } + private static void sendBinlogDump(MysqlConnector connector, String binlogfilename, Long binlogPosition, int slaveId) throws IOException { BinlogDumpCommandPacket binlogDumpCmd = new BinlogDumpCommandPacket(); @@ -134,7 +140,7 @@ public static void update(String cmd, MysqlConnector connector) throws IOExcepti public static void parseRowsEvent(RowsLogEvent event, AtomicLong sum) { try { - RowsLogBuffer buffer = event.getRowsBuf(charset.name()); + RowsLogBuffer buffer = event.getRowsBuf(charset); BitSet columns = event.getColumns(); BitSet changeColumns = event.getChangeColumns(); while (buffer.nextOneRow(columns)) { diff --git a/parse/src/test/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/FastsqlSchemaTest.java b/parse/src/test/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/FastsqlSchemaTest.java index 9c892db916..45a245cbf2 100644 --- a/parse/src/test/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/FastsqlSchemaTest.java +++ b/parse/src/test/java/com/alibaba/otter/canal/parse/inbound/mysql/tsdb/FastsqlSchemaTest.java @@ -78,4 +78,111 @@ public void test_persistent() throws Throwable { System.out.println(table.getStatement().toString()); Assert.assertTrue(table.findColumn("c1") != null); } + + @Test + public void test_primaryKey() throws Throwable { + SchemaRepository repository = new SchemaRepository(JdbcConstants.MYSQL); + { + String sql1 = "CREATE TABLE test ( id NOT NULL, 
name varchar(32) ) ENGINE=InnoDB; "; + String sql2 = " ALTER TABLE test add primary key(id);"; + repository.console(sql1); + String rs = repository.console(sql2); + System.out.println(rs); + repository.setDefaultSchema("test"); + SchemaObject table = repository.findTable("test"); + Assert.assertTrue(table.findColumn("id").isOnlyPrimaryKey()); + } + + { + String sql1 = "CREATE TABLE test ( id NOT NULL, name varchar(32) ) ENGINE=InnoDB; "; + String sql2 = "ALTER TABLE test MODIFY id bigint AUTO_INCREMENT PRIMARY KEY; "; + repository.console(sql1); + repository.console(sql2); + repository.setDefaultSchema("test"); + SchemaObject table = repository.findTable("test"); + Assert.assertTrue(table.findColumn("id").isOnlyPrimaryKey()); + Assert.assertTrue(table.findColumn("id").isAutoIncrement()); + } + } + + @Test + public void test_partition_table() throws Throwable { + SchemaRepository repository = new SchemaRepository(JdbcConstants.MYSQL); + String sql1 = "create table test (\n" + " id int not null AUTO_INCREMENT primary key,\n" + + " name varchar(32) \n" + " )\n" + " partition by range(id) (\n" + + " partition p1 values less than (10),\n" + " partition px values less than MAXVALUE\n" + + " );"; + String sql2 = "alter table test add partition ( partition 2 VALUES LESS THAN (738552) ENGINE = InnoDB, PARTITION pmax VALUES LESS THAN MAXVALUE ENGINE = InnoDB)"; + repository.console(sql1); + repository.console(sql2); + repository.setDefaultSchema("test"); + SchemaObject table = repository.findTable("test"); + Assert.assertTrue(table != null); + } + + @Test + public void test_mariadb_aria() throws Throwable { + SchemaRepository repository = new SchemaRepository(JdbcConstants.MYSQL); + String sql1 = "CREATE TABLE test (\n" + "db_name varchar(64) COLLATE utf8_bin NOT NULL,\n" + + "table_name varchar(64) COLLATE utf8_bin NOT NULL,\n" + + "column_name varchar(64) COLLATE utf8_bin NOT NULL,\n" + + "min_value varbinary(255) DEFAULT NULL,\n" + "max_value varbinary(255) DEFAULT 
NULL,\n" + + "nulls_ratio decimal(12,4) DEFAULT NULL,\n" + "avg_length decimal(12,4) DEFAULT NULL,\n" + + "avg_frequency decimal(12,4) DEFAULT NULL,\n" + "hist_size tinyint(3) unsigned DEFAULT NULL,\n" + + "hist_type enum('SINGLE_PREC_HB','DOUBLE_PREC_HB') COLLATE utf8_bin DEFAULT NULL,\n" + + "histogram varbinary(255) DEFAULT NULL,\n" + "PRIMARY KEY (db_name,table_name,column_name)\n" + + ") ENGINE=Aria DEFAULT CHARSET=utf8 COLLATE=utf8_bin PAGE_CHECKSUM=1 TRANSACTIONAL=0"; + repository.console(sql1); + repository.setDefaultSchema("test"); + SchemaObject table = repository.findTable("test"); + Assert.assertTrue(table != null); + } + + @Test + public void test_polardb_x() throws Throwable { + SchemaRepository repository = new SchemaRepository(JdbcConstants.MYSQL); + repository.setDefaultSchema("test"); + + String sql1 = "CREATE TABLE `test1` (\n" + " `id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT,\n" + + " `serialNo` varchar(64) CHARACTER SET utf8mb4 NOT NULL DEFAULT '',\n" + + " `user_id` int(11) DEFAULT NULL COMMENT '用户id',\n" + " PRIMARY KEY (`id`)\n" + + ") ENGINE = InnoDB PARTITION BY KEY(`tenant_id`,`id`)\n" + "PARTITIONS 21 tablegroup = `tg_p_msg`"; + repository.console(sql1); + SchemaObject table = repository.findTable("test1"); + Assert.assertTrue(table != null); + + + String sql2 = "CREATE TABLE `test2` (\n" + " `id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT,\n" + + " `serialNo` varchar(64) CHARACTER SET utf8mb4 NOT NULL DEFAULT '',\n" + + " `user_id` int(11) DEFAULT NULL COMMENT '用户id',\n" + " PRIMARY KEY (`id`)\n" + + ") ENGINE = InnoDB single"; + repository.console(sql2); + table = repository.findTable("test2"); + Assert.assertTrue(table != null); + + + String sql3 = "CREATE TABLE `test3` (\n" + " `id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT,\n" + + " `serialNo` varchar(64) CHARACTER SET utf8mb4 NOT NULL DEFAULT '',\n" + + " `user_id` int(11) DEFAULT NULL COMMENT '用户id',\n" + " PRIMARY KEY (`id`)\n" + + ") ENGINE = InnoDB locality = 
'dn=polardbx-ng28-dn-1,polardbx-ng28-dn-2'"; + repository.console(sql3); + table = repository.findTable("test3"); + Assert.assertTrue(table != null); + + String sql4 = "CREATE TABLE test4(\n" + " order_id int AUTO_INCREMENT primary key,\n" + + " customer_id int,\n" + " country varchar(64),\n" + " city varchar(64),\n" + + " order_time datetime not null)\n" + "PARTITION BY LIST COLUMNS(country,city)\n" + "(\n" + + " PARTITION p1 VALUES IN (('China','Shanghai')) LOCALITY = 'dn=polardbx-ng28-dn-2',\n" + + " PARTITION p2 VALUES IN (('China','Beijing')) LOCALITY = 'dn=polardbx-ng28-dn-2',\n" + + " PARTITION p3 VALUES IN (('China','Hangzhou')) ,\n" + + " PARTITION p4 VALUES IN (('China','Nanjing')) ,\n" + + " PARTITION p5 VALUES IN (('China','Guangzhou')) ,\n" + + " PARTITION p6 VALUES IN (('China','Shenzhen')) ,\n" + + " PARTITION p7 VALUES IN (('China','Wuhan')) ,\n" + + " PARTITION p8 VALUES IN (('America','New York'))\n" + + ") LOCALITY = 'dn=polardbx-ng28-dn-0,polardbx-ng28-dn-1';"; + repository.console(sql4); + table = repository.findTable("test4"); + Assert.assertTrue(table != null); + } } diff --git a/pom.xml b/pom.xml index 9bba000bc7..d7845dcc4a 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ canal pom canal module for otter ${project.version} - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT https://github.com/alibaba/canal org.sonatype.oss @@ -58,7 +58,7 @@ java.net - http://download.java.net/maven/2/ + https://download.java.net/maven/2/ true @@ -107,9 +107,14 @@ 1.8 UTF-8 true - 5.0.5.RELEASE - 1.2.9 - 1.7.12 + 5.3.9 + 2.17.0 + 4.8.0 + 5.5.0 + 1.0.3 + 2.4.0 + 2.8.1 + 5.1.48 0.8.3 2.22.1 -server -Xms512m -Xmx1024m -Dfile.encoding=UTF-8 @@ -117,6 +122,7 @@ 3.7.0 1.3.2 + true @@ -143,29 +149,59 @@ - org.springframework - spring-core - ${spring_version} + org.springframework + spring-core + ${spring_version} + + + org.springframework + spring-aop + ${spring_version} + + + org.springframework + spring-context + ${spring_version} + + + org.springframework + spring-jdbc + 
${spring_version} + + + org.springframework + spring-orm + ${spring_version} - org.springframework - spring-aop - ${spring_version} + org.springframework + spring-tx + ${spring_version} + + + org.springframework + spring-web + ${spring_version} + + + org.springframework + spring-webmvc + ${spring_version} - org.springframework - spring-context - ${spring_version} + org.springframework + spring-jcl + ${spring_version} - org.springframework - spring-jdbc - ${spring_version} + org.springframework + spring-expression + ${spring_version} - org.springframework - spring-orm - ${spring_version} + org.springframework + spring-beans + ${spring_version} org.springframework @@ -179,6 +215,11 @@ commons-lang 2.6 + + org.apache.commons + commons-lang3 + 3.7 + commons-io commons-io @@ -194,20 +235,30 @@ commons-codec 1.15 + + commons-beanutils + commons-beanutils + 1.9.4 + org.apache.commons commons-compress - 1.21 + 1.22 + + + com.github.luben + zstd-jni + 1.5.2-5 org.apache.httpcomponents httpclient - 4.5.1 + 4.5.13 org.apache.zookeeper zookeeper - 3.4.5 + 3.5.6 log4j @@ -225,6 +276,14 @@ jline jline + + io.netty + netty-handler + + + io.netty + netty-transport-native-epoll + @@ -233,9 +292,9 @@ 0.10 - com.alibaba - fastjson - 1.2.70 + com.alibaba.fastjson2 + fastjson2 + 2.0.31 com.google.guava @@ -255,7 +314,12 @@ io.netty netty-all - 4.1.6.Final + 4.1.68.Final + + + org.jboss.netty + netty + 3.2.10.Final com.google.protobuf @@ -270,12 +334,12 @@ org.mybatis mybatis - 3.5.4 + 3.5.6 com.alibaba druid - 1.2.14 + 1.2.17 com.lmax @@ -286,28 +350,59 @@ ch.qos.logback logback-core - ${logback_version} + 1.2.8 ch.qos.logback logback-classic - ${logback_version} + 1.2.8 org.slf4j jcl-over-slf4j - ${slf4j_version} + 1.7.12 org.slf4j slf4j-api - ${slf4j_version} + 1.7.12 com.h2database h2 - 1.4.196 + 2.1.210 + + + mysql + mysql-connector-java + ${mysql_driver_version} + + + com.google.code.findbugs + jsr305 + 3.0.2 + + + javax.annotation + javax.annotation-api + 
${javax.annotation-api.version} + + org.apache.logging.log4j + log4j-core + ${log4j_version} + + + org.apache.logging.log4j + log4j-api + ${log4j_version} + + + org.apache.logging.log4j + log4j-to-slf4j + ${log4j_version} + + junit @@ -443,6 +538,7 @@ classes true + ${closeTestReports} diff --git a/prometheus/pom.xml b/prometheus/pom.xml index 7479fd91d6..21f9e13a74 100644 --- a/prometheus/pom.xml +++ b/prometheus/pom.xml @@ -3,13 +3,13 @@ canal com.alibaba.otter - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT 4.0.0 com.alibaba.otter canal.prometheus - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT canal prometheus module for otter ${project.version} diff --git a/protocol/pom.xml b/protocol/pom.xml index 86b61b26e2..4e47adce2c 100644 --- a/protocol/pom.xml +++ b/protocol/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter diff --git a/protocol/src/main/java/com/alibaba/otter/canal/protocol/FlatMessage.java b/protocol/src/main/java/com/alibaba/otter/canal/protocol/FlatMessage.java index 0957b75943..90f93af1a0 100644 --- a/protocol/src/main/java/com/alibaba/otter/canal/protocol/FlatMessage.java +++ b/protocol/src/main/java/com/alibaba/otter/canal/protocol/FlatMessage.java @@ -27,6 +27,7 @@ public class FlatMessage implements Serializable { private Map mysqlType; private List> data; private List> old; + private String gtid; public FlatMessage() { } @@ -146,10 +147,18 @@ public void setEs(Long es) { this.es = es; } + public String getGtid() { + return gtid; + } + + public void setGtid(String gtid) { + this.gtid = gtid; + } + @Override public String toString() { return "FlatMessage [id=" + id + ", database=" + database + ", table=" + table + ", isDdl=" + isDdl + ", type=" + type + ", es=" + es + ", ts=" + ts + ", sql=" + sql + ", sqlType=" + sqlType + ", mysqlType=" - + mysqlType + ", data=" + data + ", old=" + old + "]"; + + mysqlType + ", data=" + data + ", old=" + old + ", gtid=" + gtid +"]"; } } diff --git 
a/server/src/test/java/com/alibaba/otter/canal/server/ProtocolTest.java b/protocol/src/test/java/com/alibaba/otter/canal/protocol/ProtocolTest.java similarity index 98% rename from server/src/test/java/com/alibaba/otter/canal/server/ProtocolTest.java rename to protocol/src/test/java/com/alibaba/otter/canal/protocol/ProtocolTest.java index da479e20f2..dae8b115b7 100644 --- a/server/src/test/java/com/alibaba/otter/canal/server/ProtocolTest.java +++ b/protocol/src/test/java/com/alibaba/otter/canal/protocol/ProtocolTest.java @@ -1,4 +1,4 @@ -package com.alibaba.otter.canal.server; +package com.alibaba.otter.canal.protocol; import com.alibaba.otter.canal.protocol.CanalEntry.Entry; import com.alibaba.otter.canal.protocol.CanalEntry.EntryType; diff --git a/server/src/test/java/com/alibaba/otter/canal/server/SecurityUtilTest.java b/protocol/src/test/java/com/alibaba/otter/canal/protocol/SecurityUtilTest.java similarity index 94% rename from server/src/test/java/com/alibaba/otter/canal/server/SecurityUtilTest.java rename to protocol/src/test/java/com/alibaba/otter/canal/protocol/SecurityUtilTest.java index 69e60ae464..92c5df4c27 100644 --- a/server/src/test/java/com/alibaba/otter/canal/server/SecurityUtilTest.java +++ b/protocol/src/test/java/com/alibaba/otter/canal/protocol/SecurityUtilTest.java @@ -1,4 +1,4 @@ -package com.alibaba.otter.canal.server; +package com.alibaba.otter.canal.protocol; import java.security.NoSuchAlgorithmException; diff --git a/server/pom.xml b/server/pom.xml index 22140500a9..88b2ab4848 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml canal.server @@ -33,7 +33,6 @@ org.apache.commons commons-lang3 - 3.7 diff --git a/server/src/main/java/com/alibaba/otter/canal/server/embedded/CanalServerWithEmbedded.java b/server/src/main/java/com/alibaba/otter/canal/server/embedded/CanalServerWithEmbedded.java index a6515231f6..aba22002f1 100644 --- 
a/server/src/main/java/com/alibaba/otter/canal/server/embedded/CanalServerWithEmbedded.java +++ b/server/src/main/java/com/alibaba/otter/canal/server/embedded/CanalServerWithEmbedded.java @@ -7,6 +7,7 @@ import java.util.Map; import java.util.ServiceLoader; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; @@ -72,11 +73,12 @@ public void start() { if (!isStart()) { super.start(); // 如果存在provider,则启动metrics service - loadCanalMetrics(); - metrics.setServerPort(metricsPort); - metrics.initialize(); + if(metricsPort > 0) { + loadCanalMetrics(); + metrics.setServerPort(metricsPort); + metrics.initialize(); + } canalInstances = MigrateMap.makeComputingMap(destination -> canalInstanceGenerator.generate(destination)); - // lastRollbackPostions = new MapMaker().makeMap(); } } @@ -266,9 +268,10 @@ public Message get(ClientIdentity clientIdentity, int batchSize, Long timeout, T boolean raw = isRaw(canalInstance.getEventStore()); List entrys = null; if (raw) { - entrys = Lists.transform(events.getEvents(), Event::getRawEntry); + // new list + entrys = events.getEvents().stream().map(Event::getRawEntry).collect(Collectors.toList()); } else { - entrys = Lists.transform(events.getEvents(), Event::getEntry); + entrys = events.getEvents().stream().map(Event::getEntry).collect(Collectors.toList()); } if (logger.isInfoEnabled()) { logger.info("get successfully, clientId:{} batchSize:{} real size is {} and result is [batchId:{} , position:{}]", @@ -348,9 +351,10 @@ public Message getWithoutAck(ClientIdentity clientIdentity, int batchSize, Long boolean raw = isRaw(canalInstance.getEventStore()); List entrys = null; if (raw) { - entrys = Lists.transform(events.getEvents(), Event::getRawEntry); + // new list + entrys = events.getEvents().stream().map(Event::getRawEntry).collect(Collectors.toList()); } else { - entrys = Lists.transform(events.getEvents(), Event::getEntry); + entrys = 
events.getEvents().stream().map(Event::getEntry).collect(Collectors.toList()); } if (logger.isInfoEnabled()) { logger.info("getWithoutAck successfully, clientId:{} batchSize:{} real size is {} and result is [batchId:{} , position:{}]", diff --git a/sink/pom.xml b/sink/pom.xml index 284f5d5b3e..7cd45b9832 100644 --- a/sink/pom.xml +++ b/sink/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter diff --git a/store/pom.xml b/store/pom.xml index 189fde7609..7dae866484 100644 --- a/store/pom.xml +++ b/store/pom.xml @@ -3,7 +3,7 @@ com.alibaba.otter canal - 1.1.6-SNAPSHOT + 1.1.8-SNAPSHOT ../pom.xml com.alibaba.otter