diff --git a/dubbo-api-docs/dubbo-api-docs-examples/examples-provider/pom.xml b/dubbo-api-docs/dubbo-api-docs-examples/examples-provider/pom.xml index a60c2f70a..e2a424f6c 100644 --- a/dubbo-api-docs/dubbo-api-docs-examples/examples-provider/pom.xml +++ b/dubbo-api-docs/dubbo-api-docs-examples/examples-provider/pom.xml @@ -103,7 +103,7 @@ org.apache.dubbo - dubbo-dependencies-zookeeper + dubbo-dependencies-zookeeper-curator5 pom diff --git a/dubbo-cluster-extensions/dubbo-cluster-broadcast-1/src/test/java/org/apache/dubbo/rpc/cluster/support/BroadcastCluster1InvokerTest.java b/dubbo-cluster-extensions/dubbo-cluster-broadcast-1/src/test/java/org/apache/dubbo/rpc/cluster/support/BroadcastCluster1InvokerTest.java index 68589bab7..4a01a9c9b 100644 --- a/dubbo-cluster-extensions/dubbo-cluster-broadcast-1/src/test/java/org/apache/dubbo/rpc/cluster/support/BroadcastCluster1InvokerTest.java +++ b/dubbo-cluster-extensions/dubbo-cluster-broadcast-1/src/test/java/org/apache/dubbo/rpc/cluster/support/BroadcastCluster1InvokerTest.java @@ -16,13 +16,147 @@ */ package org.apache.dubbo.rpc.cluster.support; +import org.apache.dubbo.common.URL; +import org.apache.dubbo.rpc.AppResponse; +import org.apache.dubbo.rpc.Invocation; +import org.apache.dubbo.rpc.Invoker; +import org.apache.dubbo.rpc.Result; +import org.apache.dubbo.rpc.RpcException; +import org.apache.dubbo.rpc.RpcInvocation; +import org.apache.dubbo.rpc.cluster.Directory; + +import java.util.Arrays; +import java.util.Collections; + +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -class BroadcastCluster1InvokerTest { +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.mock; + +/** + * @see BroadcastCluster1Invoker + */ +public class BroadcastCluster1InvokerTest { + private URL url; + private Directory dic; + private RpcInvocation invocation; + private BroadcastCluster1Invoker clusterInvoker; + + private MockInvoker invoker1; + private MockInvoker invoker2; + private MockInvoker invoker3; + private MockInvoker invoker4; + + @BeforeEach + public void setUp() throws Exception { + + dic = mock(Directory.class); + + invoker1 = new MockInvoker(); + invoker2 = new MockInvoker(); + invoker3 = new MockInvoker(); + invoker4 = new MockInvoker(); + + url = URL.valueOf("test://127.0.0.1:8080/test"); + given(dic.getUrl()).willReturn(url); + given(dic.getConsumerUrl()).willReturn(url); + given(dic.getInterface()).willReturn(DemoService.class); + + invocation = new RpcInvocation(); + invocation.setMethodName("test"); + + clusterInvoker = new BroadcastCluster1Invoker(dic); + } + + @Test + void testNormal() { + given(dic.list(invocation)).willReturn(Arrays.asList(invoker1, invoker2, invoker3, invoker4)); + // Every invoker will be called + clusterInvoker.invoke(invocation); + assertTrue(invoker1.isInvoked()); + assertTrue(invoker2.isInvoked()); + assertTrue(invoker3.isInvoked()); + assertTrue(invoker4.isInvoked()); + } + + @Test + void testEx() { + given(dic.list(invocation)).willReturn(Arrays.asList(invoker1, invoker2, invoker3, invoker4)); + invoker1.invokeThrowEx(); + invoker2.invokeThrowEx(); + invoker3.invokeThrowEx(); + invoker4.invokeThrowEx(); + Throwable exception = clusterInvoker.invoke(invocation).getException(); + assertInstanceOf(RpcException.class, exception); + 
assertTrue(exception.getMessage().contains("java.lang.NullPointerException")); + assertTrue(invoker1.isInvoked()); + assertTrue(invoker2.isInvoked()); + assertTrue(invoker3.isInvoked()); + assertTrue(invoker4.isInvoked()); + } @Test - void doInvoke() { - //todo + void testFailByOneInvoker() { + given(dic.list(invocation)).willReturn(Arrays.asList(invoker1, invoker2, invoker3, invoker4)); + // If any single invoker throws, the aggregated result carries the exception. + invoker1.invokeThrowEx(); + Throwable exception = clusterInvoker.invoke(invocation).getException(); + assertInstanceOf(RpcException.class, exception); + assertTrue(exception.getMessage().contains("java.lang.NullPointerException")); + } + + @Test + void testNoProvider() { + given(dic.list(invocation)).willReturn(Collections.emptyList()); + RpcException exception = assertThrows(RpcException.class, () -> + clusterInvoker.invoke(invocation) + ); + assertTrue(exception.getMessage().contains("No provider available")); + } +} + +class MockInvoker implements Invoker { + private URL url = URL.valueOf("test://127.0.0.1:8080/test"); + private boolean throwEx = false; + private boolean invoked = false; + + @Override + public URL getUrl() { + return url; + } + + @Override + public boolean isAvailable() { + return false; + } + + @Override + public void destroy() {} + + @Override + public Class getInterface() { + return DemoService.class; + } + + @Override + public Result invoke(Invocation invocation) throws RpcException { + invoked = true; + if (throwEx) { + throwEx = false; + throw new RpcException(); + } + return new AppResponse("success"); + } + + public void invokeThrowEx() { + throwEx = true; + } + public boolean isInvoked() { + return invoked; } } diff --git a/dubbo-cluster-extensions/dubbo-cluster-broadcast-1/src/test/java/org/apache/dubbo/rpc/cluster/support/BroadcastCluster1Test.java b/dubbo-cluster-extensions/dubbo-cluster-broadcast-1/src/test/java/org/apache/dubbo/rpc/cluster/support/DemoService.java similarity index 86% rename from dubbo-cluster-extensions/dubbo-cluster-broadcast-1/src/test/java/org/apache/dubbo/rpc/cluster/support/BroadcastCluster1Test.java rename to dubbo-cluster-extensions/dubbo-cluster-broadcast-1/src/test/java/org/apache/dubbo/rpc/cluster/support/DemoService.java index 559e3faab..e2d94649b 100644 --- a/dubbo-cluster-extensions/dubbo-cluster-broadcast-1/src/test/java/org/apache/dubbo/rpc/cluster/support/BroadcastCluster1Test.java +++ b/dubbo-cluster-extensions/dubbo-cluster-broadcast-1/src/test/java/org/apache/dubbo/rpc/cluster/support/DemoService.java @@ -16,11 +16,11 @@ */ package org.apache.dubbo.rpc.cluster.support; -import org.junit.jupiter.api.Test; - -class BroadcastCluster1Test { +/** + * TestService + */ +public interface DemoService { + String sayHello(String name); - @Test - void doJoin() { - } + int plus(int a, int b); } diff --git a/dubbo-extensions-dependencies-bom/pom.xml b/dubbo-extensions-dependencies-bom/pom.xml index 25fa3d5a5..f67f133b7 100644 --- a/dubbo-extensions-dependencies-bom/pom.xml +++ b/dubbo-extensions-dependencies-bom/pom.xml @@ -102,7 +102,7 @@ 2.0 1.4.3 0.12.0 - 3.10.0 + 5.1.0 0.13.0 3.17.0 2.2.7 @@ -185,7 +185,7 @@ org.apache.dubbo - dubbo-dependencies-zookeeper + dubbo-dependencies-zookeeper-curator5 ${dubbo.version} pom import @@ -325,6 +325,11 @@ fury-core ${apache.fury_version} + + org.apache.fury + fury-core + ${apache.fury_version} + + com.fasterxml.jackson.core jackson-core @@ -663,7 +668,13 @@ ${maven_flatten_version} true - resolveCiFriendliesOnly + bom + + expand + remove + remove +
remove + diff --git a/dubbo-gateway-extensions/dubbo-gateway-consumer/src/test/java/org/apache/dubbo/gateway/consumer/test/OmnSerFilterTest.java b/dubbo-gateway-extensions/dubbo-gateway-consumer/src/test/java/org/apache/dubbo/gateway/consumer/test/OmnSerFilterTest.java new file mode 100644 index 000000000..c9eed1054 --- /dev/null +++ b/dubbo-gateway-extensions/dubbo-gateway-consumer/src/test/java/org/apache/dubbo/gateway/consumer/test/OmnSerFilterTest.java @@ -0,0 +1,156 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dubbo.gateway.consumer.test; + +import org.apache.dubbo.common.beanutil.JavaBeanDescriptor; +import org.apache.dubbo.common.beanutil.JavaBeanSerializeUtil; +import org.apache.dubbo.common.constants.CommonConstants; +import org.apache.dubbo.common.extension.Activate; +import org.apache.dubbo.common.logger.Logger; +import org.apache.dubbo.common.logger.LoggerFactory; +import org.apache.dubbo.common.utils.ReflectUtils; +import org.apache.dubbo.gateway.consumer.filter.OmnSerFilter; +import org.apache.dubbo.rpc.*; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import static org.apache.dubbo.gateway.common.OmnipotentCommonConstants.SPECIFY_ADDRESS; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; +import static org.junit.jupiter.api.Assertions.*; + +public class OmnSerFilterTest { + + @InjectMocks + private OmnSerFilter omnSerFilter; + + @Mock + private Invoker> invoker; + + @Mock + private Invocation invocation; + + @Mock + private Result result; + + @BeforeEach + public void setup() { + MockitoAnnotations.initMocks(this); + } + + @Test + public void testInvokeWithSpecifyAddress() throws RpcException { + // Set up + when(invocation.get(SPECIFY_ADDRESS)).thenReturn("127.0.0.1"); + when(invoker.invoke(any(Invocation.class))).thenReturn(result); + + // Invoke + Result actual = omnSerFilter.invoke(invoker, invocation); + + // Assert + assertNotNull(actual); + // Additional assertions can be added based on expected behavior + } + + @Test + public void testInvokeWithoutSpecifyAddress() throws RpcException { + // Set up + when(invocation.get(SPECIFY_ADDRESS)).thenReturn(null); + when(invoker.invoke(any(Invocation.class))).thenReturn(result); + + // Invoke + Result actual = omnSerFilter.invoke(invoker, invocation); + + // Assert + assertNotNull(actual); + // Additional assertions can be added based on expected behavior + } + + @Test + public void testOnResponseWithPrimitives() 
{ + // Set up + Object primitives = new Integer(10); + when(result.getValue()).thenReturn(primitives); + + // Invoke + omnSerFilter.onResponse(result, invoker, invocation); + + // Assert + assertEquals(primitives, result.getValue()); + // Additional assertions can be added based on expected behavior for primitive types + } + + @Test + public void testOnResponseWithPojo() { + // Set up + JavaBeanDescriptor pojo = new JavaBeanDescriptor(); + when(result.getValue()).thenReturn(pojo); + + // Invoke + omnSerFilter.onResponse(result, invoker, invocation); + + // Assert + assertSame(pojo, result.getValue()); + // Additional assertions can be added based on expected behavior for pojo types + } + + @Test + public void testOnResponseWithCollection() { + // Set up + Collection collection = new ArrayList<>(); + JavaBeanDescriptor descriptor = new JavaBeanDescriptor("org.apache.dubbo.gateway.consumer.test.Person",1); + descriptor.setProperty("name", "org.apache.dubbo.gateway.consumer.test.Person"); + collection.add(descriptor); + when(result.getValue()).thenReturn(collection); + + // Invoke + omnSerFilter.onResponse(result, invoker, invocation); + + // Assert + // Ensure the collection is unchanged + assertEquals(1, ((Collection) result.getValue()).size()); + assertEquals(Person.class.toString(), ((Collection) result.getValue()).iterator().next().toString()); + } + + @Test + public void testOnResponseWithMap() { + // Set up + Map map = new HashMap<>(); + JavaBeanDescriptor descriptor = new JavaBeanDescriptor("org.apache.dubbo.gateway.consumer.test.Person", 1); + map.put("org.apache.dubbo.gateway.consumer.test.Person", descriptor); + when(result.getValue()).thenReturn(map); + descriptor.setProperty("name", "org.apache.dubbo.gateway.consumer.test.Person"); + + // Invoke + omnSerFilter.onResponse(result, invoker, invocation); + + // Assert + // Additional assertions can be added based on expected behavior for map types + assertEquals(1, ((Map) result.getValue()).size()); + assertEquals(Person.class.toString(), (((Map) result.getValue()).get("org.apache.dubbo.gateway.consumer.test.Person").toString())); + + } +} diff --git a/dubbo-gateway-extensions/dubbo-gateway-consumer/src/test/java/org/apache/dubbo/gateway/consumer/test/Person.java b/dubbo-gateway-extensions/dubbo-gateway-consumer/src/test/java/org/apache/dubbo/gateway/consumer/test/Person.java new file mode 100644 index 000000000..5d3bdc263 --- /dev/null +++ b/dubbo-gateway-extensions/dubbo-gateway-consumer/src/test/java/org/apache/dubbo/gateway/consumer/test/Person.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dubbo.gateway.consumer.test; + +import java.io.Serializable; +import java.util.Arrays; + +public class Person implements Serializable { + byte oneByte = 123; + private String name = "name1"; + private int age = 11; + + private String[] value = {"value1", "value2"}; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public byte getOneByte() { + return oneByte; + } + + public void setOneByte(byte b) { + this.oneByte = b; + } + + public int getAge() { + return age; + } + + public void setAge(int age) { + this.age = age; + } + + public String[] getValue() { + return value; + } + + public void setValue(String[] value) { + this.value = value; + } + + @Override + public String toString() { + return String.format("Person name(%s) age(%d) byte(%s) [value=%s]", name, age, oneByte, Arrays.toString(value)); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + age; + result = prime * result + ((name == null) ? 0 : name.hashCode()); + result = prime * result + Arrays.hashCode(value); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Person other = (Person) obj; + if (age != other.age) + return false; + if (name == null) { + if (other.name != null) + return false; + } else if (!name.equals(other.name)) + return false; + if (!Arrays.equals(value, other.value)) + return false; + return true; + } +} diff --git a/dubbo-gateway-extensions/dubbo-gateway-consumer/src/test/resources/security/serialize.allowlist b/dubbo-gateway-extensions/dubbo-gateway-consumer/src/test/resources/security/serialize.allowlist new file mode 100644 index 000000000..3d2f1d09d --- /dev/null +++ b/dubbo-gateway-extensions/dubbo-gateway-consumer/src/test/resources/security/serialize.allowlist @@ -0,0 +1,20 @@ +# +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# + +org.apache.dubbo.gateway.consumer.test.Person diff --git a/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/pom.xml b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/pom.xml new file mode 100644 index 000000000..00e163388 --- /dev/null +++ b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/pom.xml @@ -0,0 +1,53 @@ + + + 4.0.0 + + org.apache.dubbo.extensions + dubbo-metadata-report-extensions + ${revision} + + + dubbo-metadata-report-redis + + + + org.apache.dubbo + dubbo-metadata-api + true + + + redis.clients + jedis + + + com.github.codemonstur + embedded-redis + test + + + org.apache.commons + commons-lang3 + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + diff --git a/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/main/java/org/apache/dubbo/metadata/store/redis/RedisMetadataReport.java b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/main/java/org/apache/dubbo/metadata/store/redis/RedisMetadataReport.java new file mode 100644 index 000000000..3ecdab15c --- /dev/null +++ b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/main/java/org/apache/dubbo/metadata/store/redis/RedisMetadataReport.java @@ -0,0 +1,556 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dubbo.metadata.store.redis; + +import org.apache.dubbo.common.URL; +import org.apache.dubbo.common.config.configcenter.ConfigItem; +import org.apache.dubbo.common.logger.ErrorTypeAwareLogger; +import org.apache.dubbo.common.logger.LoggerFactory; +import org.apache.dubbo.common.utils.ConcurrentHashMapUtils; +import org.apache.dubbo.common.utils.ConcurrentHashSet; +import org.apache.dubbo.common.utils.JsonUtils; +import org.apache.dubbo.common.utils.StringUtils; +import org.apache.dubbo.common.utils.CollectionUtils; +import org.apache.dubbo.metadata.MappingChangedEvent; +import org.apache.dubbo.metadata.MappingListener; +import org.apache.dubbo.metadata.MetadataInfo; +import org.apache.dubbo.metadata.ServiceNameMapping; +import org.apache.dubbo.metadata.report.identifier.BaseMetadataIdentifier; +import org.apache.dubbo.metadata.report.identifier.KeyTypeEnum; +import org.apache.dubbo.metadata.report.identifier.MetadataIdentifier; +import org.apache.dubbo.metadata.report.identifier.ServiceMetadataIdentifier; +import org.apache.dubbo.metadata.report.identifier.SubscriberMetadataIdentifier; +import org.apache.dubbo.metadata.report.support.AbstractMetadataReport; +import org.apache.dubbo.rpc.RpcException; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +import org.apache.commons.pool2.impl.GenericObjectPoolConfig; +import redis.clients.jedis.HostAndPort; +import redis.clients.jedis.Jedis; +import redis.clients.jedis.JedisCluster; +import redis.clients.jedis.JedisPool; +import redis.clients.jedis.JedisPoolConfig; +import redis.clients.jedis.JedisPubSub; +import redis.clients.jedis.Transaction; +import redis.clients.jedis.params.SetParams; +import redis.clients.jedis.util.JedisClusterCRC16; + +import static org.apache.dubbo.common.constants.CommonConstants.CLUSTER_KEY; +import static org.apache.dubbo.common.constants.CommonConstants.CYCLE_REPORT_KEY; +import static org.apache.dubbo.common.constants.CommonConstants.DEFAULT_TIMEOUT; +import static org.apache.dubbo.common.constants.CommonConstants.GROUP_CHAR_SEPARATOR; +import static org.apache.dubbo.common.constants.CommonConstants.QUEUES_KEY; +import static org.apache.dubbo.common.constants.CommonConstants.TIMEOUT_KEY; +import static org.apache.dubbo.common.constants.LoggerCodeConstants.TRANSPORT_FAILED_RESPONSE; +import static org.apache.dubbo.metadata.MetadataConstants.META_DATA_STORE_TAG; +import static org.apache.dubbo.metadata.ServiceNameMapping.DEFAULT_MAPPING_GROUP; +import static org.apache.dubbo.metadata.ServiceNameMapping.getAppNames; +import static org.apache.dubbo.metadata.report.support.Constants.DEFAULT_METADATA_REPORT_CYCLE_REPORT; + +/** + * RedisMetadataReport + */ +public class RedisMetadataReport extends AbstractMetadataReport { + + private static final int ONE_DAY_IN_MILLISECONDS = 86400000; + + private static final String REDIS_DATABASE_KEY = "database"; + private static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(RedisMetadataReport.class); + + // protected , for test + protected JedisPool pool; + private Set jedisClusterNodes; + private int timeout; + private String password; + private final String root; + private final ConcurrentHashMap mappingDataListenerMap = new ConcurrentHashMap<>(); + private SetParams jedisParams = SetParams.setParams(); + + public RedisMetadataReport(URL url) { + 
super(url); + timeout = url.getParameter(TIMEOUT_KEY, DEFAULT_TIMEOUT); + password = url.getPassword(); + this.root = url.getGroup(DEFAULT_ROOT); + if (url.getParameter(CYCLE_REPORT_KEY, DEFAULT_METADATA_REPORT_CYCLE_REPORT)) { + // ttl defaults to twice the cycle-report time; the constant is in milliseconds, so set it via px + jedisParams.px(ONE_DAY_IN_MILLISECONDS * 2); + } + if (url.getParameter(CLUSTER_KEY, false)) { + jedisClusterNodes = new HashSet<>(); + List urls = url.getBackupUrls(); + for (URL tmpUrl : urls) { + jedisClusterNodes.add(new HostAndPort(tmpUrl.getHost(), tmpUrl.getPort())); + } + } else { + int database = url.getParameter(REDIS_DATABASE_KEY, 0); + pool = new JedisPool(new JedisPoolConfig(), url.getHost(), url.getPort(), timeout, password, database); + } + } + + @Override + protected void doStoreProviderMetadata(MetadataIdentifier providerMetadataIdentifier, String serviceDefinitions) { + this.storeMetadata(providerMetadataIdentifier, serviceDefinitions); + } + + @Override + protected void doStoreConsumerMetadata(MetadataIdentifier consumerMetadataIdentifier, String value) { + this.storeMetadata(consumerMetadataIdentifier, value); + } + + @Override + protected void doSaveMetadata(ServiceMetadataIdentifier serviceMetadataIdentifier, URL url) { + this.storeMetadata(serviceMetadataIdentifier, URL.encode(url.toFullString())); + } + + @Override + protected void doRemoveMetadata(ServiceMetadataIdentifier serviceMetadataIdentifier) { + this.deleteMetadata(serviceMetadataIdentifier); + } + + @Override + protected List doGetExportedURLs(ServiceMetadataIdentifier metadataIdentifier) { + String content = getMetadata(metadataIdentifier); + if (StringUtils.isEmpty(content)) { + return Collections.emptyList(); + } + return new ArrayList<>(Arrays.asList(URL.decode(content))); + } + + @Override + protected void doSaveSubscriberData(SubscriberMetadataIdentifier subscriberMetadataIdentifier, String urlListStr) { + this.storeMetadata(subscriberMetadataIdentifier, urlListStr); + } + + @Override + protected String doGetSubscribedURLs(SubscriberMetadataIdentifier subscriberMetadataIdentifier) { + return this.getMetadata(subscriberMetadataIdentifier); + } + + @Override + public String getServiceDefinition(MetadataIdentifier metadataIdentifier) { + return this.getMetadata(metadataIdentifier); + } + + private void storeMetadata(BaseMetadataIdentifier metadataIdentifier, String v) { + if (pool != null) { + storeMetadataStandalone(metadataIdentifier, v); + } else { + storeMetadataInCluster(metadataIdentifier, v); + } + } + + private void storeMetadataInCluster(BaseMetadataIdentifier metadataIdentifier, String v) { + try (JedisCluster jedisCluster = + new JedisCluster(jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) { + jedisCluster.set(metadataIdentifier.getIdentifierKey() + META_DATA_STORE_TAG, v, jedisParams); + } catch (Throwable e) { + String msg = + "Failed to put " + metadataIdentifier + " to redis cluster " + v + ", cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + throw new RpcException(msg, e); + } + } + + private void storeMetadataStandalone(BaseMetadataIdentifier metadataIdentifier, String v) { + try (Jedis jedis = pool.getResource()) { + jedis.set(metadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY), v, jedisParams); + } catch (Throwable e) { + String msg = "Failed to put " + metadataIdentifier + " to redis " + v + ", cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + throw new RpcException(msg, e); + } + } + + private
void deleteMetadata(BaseMetadataIdentifier metadataIdentifier) { + if (pool != null) { + deleteMetadataStandalone(metadataIdentifier); + } else { + deleteMetadataInCluster(metadataIdentifier); + } + } + + private void deleteMetadataInCluster(BaseMetadataIdentifier metadataIdentifier) { + try (JedisCluster jedisCluster = + new JedisCluster(jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) { + jedisCluster.del(metadataIdentifier.getIdentifierKey() + META_DATA_STORE_TAG); + } catch (Throwable e) { + String msg = "Failed to delete " + metadataIdentifier + " from redis cluster , cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + throw new RpcException(msg, e); + } + } + + private void deleteMetadataStandalone(BaseMetadataIdentifier metadataIdentifier) { + try (Jedis jedis = pool.getResource()) { + jedis.del(metadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY)); + } catch (Throwable e) { + String msg = "Failed to delete " + metadataIdentifier + " from redis , cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + throw new RpcException(msg, e); + } + } + + private String getMetadata(BaseMetadataIdentifier metadataIdentifier) { + if (pool != null) { + return getMetadataStandalone(metadataIdentifier); + } else { + return getMetadataInCluster(metadataIdentifier); + } + } + + private String getMetadataInCluster(BaseMetadataIdentifier metadataIdentifier) { + try (JedisCluster jedisCluster = + new JedisCluster(jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) { + return jedisCluster.get(metadataIdentifier.getIdentifierKey() + META_DATA_STORE_TAG); + } catch (Throwable e) { + String msg = "Failed to get " + metadataIdentifier + " from redis cluster , cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + throw new RpcException(msg, e); + } + } + + private String getMetadataStandalone(BaseMetadataIdentifier metadataIdentifier) { + try (Jedis jedis = pool.getResource()) { + return jedis.get(metadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY)); + } catch (Throwable e) { + String msg = "Failed to get " + metadataIdentifier + " from redis , cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + throw new RpcException(msg, e); + } + } + + /** + * Store class and application names using Redis hashes + * key: default 'dubbo:mapping' + * field: class (serviceInterface) + * value: application_names + * @param serviceInterface field(class) + * @param defaultMappingGroup {@link ServiceNameMapping#DEFAULT_MAPPING_GROUP} + * @param newConfigContent new application_names + * @param ticket previous application_names + * @return + */ + @Override + public boolean registerServiceAppMapping( + String serviceInterface, String defaultMappingGroup, String newConfigContent, Object ticket) { + try { + if (null != ticket && !(ticket instanceof String)) { + throw new IllegalArgumentException("redis publishConfigCas requires stat type ticket"); + } + String pathKey = buildMappingKey(defaultMappingGroup); + + return storeMapping(pathKey, serviceInterface, newConfigContent, (String) ticket); + } catch (Exception e) { + logger.warn(TRANSPORT_FAILED_RESPONSE, "", "", "redis publishConfigCas failed.", e); + return false; + } + } + + private boolean storeMapping(String key, String field, String value, String ticket) { + if (pool != null) { + return storeMappingStandalone(key, field, value, ticket); + } else { + return 
storeMappingInCluster(key, field, value, ticket); + } + } + + /** + * use 'watch' to implement cas. + * Find information about slot distribution by key. + */ + private boolean storeMappingInCluster(String key, String field, String value, String ticket) { + try (JedisCluster jedisCluster = + new JedisCluster(jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) { + Jedis jedis = new Jedis(jedisCluster.getConnectionFromSlot(JedisClusterCRC16.getSlot(key))); + jedis.watch(key); + String oldValue = jedis.hget(key, field); + if (null == oldValue || null == ticket || oldValue.equals(ticket)) { + Transaction transaction = jedis.multi(); + transaction.hset(key, field, value); + List result = transaction.exec(); + if (null != result) { + jedisCluster.publish(buildPubSubKey(), field); + return true; + } + } else { + jedis.unwatch(); + } + jedis.close(); + } catch (Throwable e) { + String msg = "Failed to put " + key + ":" + field + " to redis " + value + ", cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + throw new RpcException(msg, e); + } + return false; + } + + /** + * use 'watch' to implement cas. + * Find information about slot distribution by key. + */ + private boolean storeMappingStandalone(String key, String field, String value, String ticket) { + try (Jedis jedis = pool.getResource()) { + jedis.watch(key); + String oldValue = jedis.hget(key, field); + if (null == oldValue || null == ticket || oldValue.equals(ticket)) { + Transaction transaction = jedis.multi(); + transaction.hset(key, field, value); + List result = transaction.exec(); + if (null != result) { + jedis.publish(buildPubSubKey(), field); + return true; + } + } + jedis.unwatch(); + } catch (Throwable e) { + String msg = "Failed to put " + key + ":" + field + " to redis " + value + ", cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + throw new RpcException(msg, e); + } + return false; + } + + /** + * build mapping key + * @param defaultMappingGroup {@link ServiceNameMapping#DEFAULT_MAPPING_GROUP} + * @return + */ + private String buildMappingKey(String defaultMappingGroup) { + return this.root + GROUP_CHAR_SEPARATOR + defaultMappingGroup; + } + + /** + * build pub/sub key + */ + private String buildPubSubKey() { + return buildMappingKey(DEFAULT_MAPPING_GROUP) + GROUP_CHAR_SEPARATOR + QUEUES_KEY; + } + + /** + * get content and use content to complete cas + * @param serviceKey class + * @param group {@link ServiceNameMapping#DEFAULT_MAPPING_GROUP} + */ + @Override + public ConfigItem getConfigItem(String serviceKey, String group) { + String key = buildMappingKey(group); + String content = getMappingData(key, serviceKey); + + return new ConfigItem(content, content); + } + + /** + * get current application_names + */ + private String getMappingData(String key, String field) { + if (pool != null) { + return getMappingDataStandalone(key, field); + } else { + return getMappingDataInCluster(key, field); + } + } + + private String getMappingDataInCluster(String key, String field) { + try (JedisCluster jedisCluster = + new JedisCluster(jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) { + return jedisCluster.hget(key, field); + } catch (Throwable e) { + String msg = "Failed to get " + key + ":" + field + " from redis cluster , cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + throw new RpcException(msg, e); + } + } + + private String 
getMappingDataStandalone(String key, String field) { + try (Jedis jedis = pool.getResource()) { + return jedis.hget(key, field); + } catch (Throwable e) { + String msg = "Failed to get " + key + ":" + field + " from redis , cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + throw new RpcException(msg, e); + } + } + + /** + * Remove the listener. If no listeners remain, the subscription thread is shut down. + */ + @Override + public void removeServiceAppMappingListener(String serviceKey, MappingListener listener) { + MappingDataListener mappingDataListener = mappingDataListenerMap.get(buildPubSubKey()); + if (null != mappingDataListener) { + NotifySub notifySub = mappingDataListener.getNotifySub(); + notifySub.removeListener(serviceKey, listener); + if (notifySub.isEmpty()) { + mappingDataListener.shutdown(); + } + } + } + + /** + * Start a thread and subscribe to {@link #buildPubSubKey()}. + * Notify {@link MappingListener} if there is a change in the 'application_names' message. + */ + @Override + public Set getServiceAppMapping(String serviceKey, MappingListener listener, URL url) { + MappingDataListener mappingDataListener = + ConcurrentHashMapUtils.computeIfAbsent(mappingDataListenerMap, buildPubSubKey(), k -> { + MappingDataListener dataListener = new MappingDataListener(buildPubSubKey()); + dataListener.start(); + return dataListener; + }); + mappingDataListener.getNotifySub().addListener(serviceKey, listener); + return this.getServiceAppMapping(serviceKey, url); + } + + @Override + public Set getServiceAppMapping(String serviceKey, URL url) { + String key = buildMappingKey(DEFAULT_MAPPING_GROUP); + return getAppNames(getMappingData(key, serviceKey)); + } + + @Override + public MetadataInfo getAppMetadata(SubscriberMetadataIdentifier identifier, Map instanceMetadata) { + String content = this.getMetadata(identifier); + return JsonUtils.toJavaObject(content, MetadataInfo.class); + } + + @Override + public void publishAppMetadata(SubscriberMetadataIdentifier identifier, MetadataInfo metadataInfo) { + this.storeMetadata(identifier, metadataInfo.getContent()); + } + + @Override + public void unPublishAppMetadata(SubscriberMetadataIdentifier identifier, MetadataInfo metadataInfo) { + this.deleteMetadata(identifier); + } + + // for test + public MappingDataListener getMappingDataListener() { + return mappingDataListenerMap.get(buildPubSubKey()); + } + + /** + * Listen for changes in the 'application_names' message and notify the listener.
+ */ + class NotifySub extends JedisPubSub { + + private final Map> listeners = new ConcurrentHashMap<>(); + + public void addListener(String key, MappingListener listener) { + Set listenerSet = listeners.computeIfAbsent(key, k -> new ConcurrentHashSet<>()); + listenerSet.add(listener); + } + + public void removeListener(String serviceKey, MappingListener listener) { + Set listenerSet = this.listeners.get(serviceKey); + if (listenerSet != null) { + listenerSet.remove(listener); + if (listenerSet.isEmpty()) { + this.listeners.remove(serviceKey); + } + } + } + + public Boolean isEmpty() { + return this.listeners.isEmpty(); + } + + @Override + public void onMessage(String key, String msg) { + logger.info("sub from redis:" + key + " message:" + msg); + String applicationNames = getMappingData(buildMappingKey(DEFAULT_MAPPING_GROUP), msg); + MappingChangedEvent mappingChangedEvent = new MappingChangedEvent(msg, getAppNames(applicationNames)); + if (!CollectionUtils.isEmpty(listeners.get(msg))) { + for (MappingListener mappingListener : listeners.get(msg)) { + mappingListener.onEvent(mappingChangedEvent); + } + } + } + + @Override + public void onPMessage(String pattern, String key, String msg) { + onMessage(key, msg); + } + + @Override + public void onPSubscribe(String pattern, int subscribedChannels) { + super.onPSubscribe(pattern, subscribedChannels); + } + } + + /** + * Subscribe application names change message. + */ + class MappingDataListener extends Thread { + + private String path; + + private final NotifySub notifySub = new NotifySub(); + // for test + protected volatile boolean running = true; + + public MappingDataListener(String path) { + this.path = path; + } + + public NotifySub getNotifySub() { + return notifySub; + } + + @Override + public void run() { + while (running) { + if (pool != null) { + try (Jedis jedis = pool.getResource()) { + jedis.subscribe(notifySub, path); + } catch (Throwable e) { + String msg = "Failed to subscribe " + path + ", cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + throw new RpcException(msg, e); + } + } else { + try (JedisCluster jedisCluster = new JedisCluster( + jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) { + jedisCluster.subscribe(notifySub, path); + } catch (Throwable e) { + String msg = "Failed to subscribe " + path + ", cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + throw new RpcException(msg, e); + } + } + } + } + + public void shutdown() { + try { + running = false; + notifySub.unsubscribe(path); + } catch (Throwable e) { + String msg = "Failed to unsubscribe " + path + ", cause: " + e.getMessage(); + logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e); + } + } + } +} diff --git a/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/main/java/org/apache/dubbo/metadata/store/redis/RedisMetadataReportFactory.java b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/main/java/org/apache/dubbo/metadata/store/redis/RedisMetadataReportFactory.java new file mode 100644 index 000000000..d871877f6 --- /dev/null +++ b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/main/java/org/apache/dubbo/metadata/store/redis/RedisMetadataReportFactory.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dubbo.metadata.store.redis; + +import org.apache.dubbo.common.URL; +import org.apache.dubbo.metadata.report.MetadataReport; +import org.apache.dubbo.metadata.report.support.AbstractMetadataReportFactory; + +/** + * RedisMetadataReportFactory. + */ +public class RedisMetadataReportFactory extends AbstractMetadataReportFactory { + + @Override + public MetadataReport createMetadataReport(URL url) { + return new RedisMetadataReport(url); + } +} diff --git a/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/main/resources/META-INF/dubbo/internal/org.apache.dubbo.metadata.report.MetadataReportFactory b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/main/resources/META-INF/dubbo/internal/org.apache.dubbo.metadata.report.MetadataReportFactory new file mode 100644 index 000000000..2e6effa12 --- /dev/null +++ b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/main/resources/META-INF/dubbo/internal/org.apache.dubbo.metadata.report.MetadataReportFactory @@ -0,0 +1 @@ +redis=org.apache.dubbo.metadata.store.redis.RedisMetadataReportFactory diff --git a/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/test/java/org/apache/dubbo/metadata/store/redis/RedisMetadata4TstService.java b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/test/java/org/apache/dubbo/metadata/store/redis/RedisMetadata4TstService.java new file mode 100644 index 000000000..c9662c31d --- /dev/null +++ b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/test/java/org/apache/dubbo/metadata/store/redis/RedisMetadata4TstService.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dubbo.metadata.store.redis; + +public interface RedisMetadata4TstService { + + int getCounter(); + + void printResult(String var); +} diff --git a/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/test/java/org/apache/dubbo/metadata/store/redis/RedisMetadataReportTest.java b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/test/java/org/apache/dubbo/metadata/store/redis/RedisMetadataReportTest.java new file mode 100644 index 000000000..5a922875f --- /dev/null +++ b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/test/java/org/apache/dubbo/metadata/store/redis/RedisMetadataReportTest.java @@ -0,0 +1,365 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dubbo.metadata.store.redis; + +import org.apache.commons.lang3.SystemUtils; +import org.apache.dubbo.common.URL; +import org.apache.dubbo.common.config.configcenter.ConfigItem; +import org.apache.dubbo.common.utils.JsonUtils; +import org.apache.dubbo.common.utils.NetUtils; +import org.apache.dubbo.metadata.MappingChangedEvent; +import org.apache.dubbo.metadata.MappingListener; +import org.apache.dubbo.metadata.MetadataInfo; +import org.apache.dubbo.metadata.definition.ServiceDefinitionBuilder; +import org.apache.dubbo.metadata.definition.model.FullServiceDefinition; +import org.apache.dubbo.metadata.report.identifier.KeyTypeEnum; +import org.apache.dubbo.metadata.report.identifier.MetadataIdentifier; +import org.apache.dubbo.metadata.report.identifier.SubscriberMetadataIdentifier; +import org.apache.dubbo.rpc.RpcException; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.CountDownLatch; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; +import org.junit.jupiter.api.condition.DisabledOnOs; +import org.junit.jupiter.api.condition.OS; +import redis.clients.jedis.Jedis; +import redis.clients.jedis.exceptions.JedisConnectionException; +import redis.clients.jedis.exceptions.JedisDataException; +import redis.embedded.RedisServer; + +import static org.apache.dubbo.common.constants.CommonConstants.CONSUMER_SIDE; +import static org.apache.dubbo.common.constants.CommonConstants.PROVIDER_SIDE; +import static org.apache.dubbo.common.constants.CommonConstants.SYNC_REPORT_KEY; +import static org.apache.dubbo.metadata.ServiceNameMapping.DEFAULT_MAPPING_GROUP; +import static redis.embedded.RedisServer.newRedisServer; + +@DisabledOnOs(OS.WINDOWS) +class RedisMetadataReportTest { + + private static final String REDIS_URL_TEMPLATE = 
"redis://%slocalhost:%d", + REDIS_PASSWORD = "password", + REDIS_URL_AUTH_SECTION = "username:" + REDIS_PASSWORD + "@"; + + RedisMetadataReport redisMetadataReport; + RedisMetadataReport syncRedisMetadataReport; + RedisServer redisServer; + URL registryUrl; + + @BeforeEach + public void constructor(final TestInfo testInfo) { + final boolean usesAuthentication = usesAuthentication(testInfo); + int redisPort = 0; + IOException exception = null; + + for (int i = 0; i < 10; i++) { + try { + redisPort = NetUtils.getAvailablePort(30000 + new Random().nextInt(10000)); + redisServer = newRedisServer() + .port(redisPort) + // set maxheap to fix Windows error 0x70 while starting redis + .settingIf(SystemUtils.IS_OS_WINDOWS, "maxheap 128mb") + .settingIf(usesAuthentication, "requirepass " + REDIS_PASSWORD) + .build(); + this.redisServer.start(); + exception = null; + } catch (IOException e) { + e.printStackTrace(); + exception = e; + } + if (exception == null) { + break; + } + } + + Assertions.assertNull(exception); + registryUrl = newRedisUrl(usesAuthentication, redisPort); + redisMetadataReport = (RedisMetadataReport) new RedisMetadataReportFactory().createMetadataReport(registryUrl); + URL syncRegistryUrl = registryUrl.addParameter(SYNC_REPORT_KEY, "true"); + syncRedisMetadataReport = + (RedisMetadataReport) new RedisMetadataReportFactory().createMetadataReport(syncRegistryUrl); + } + + private static boolean usesAuthentication(final TestInfo testInfo) { + final String methodName = testInfo.getTestMethod().get().getName(); + return "testAuthRedisMetadata".equals(methodName) || "testWrongAuthRedisMetadata".equals(methodName); + } + + private static URL newRedisUrl(final boolean usesAuthentication, final int redisPort) { + final String urlAuthSection = usesAuthentication ? REDIS_URL_AUTH_SECTION : ""; + return URL.valueOf(String.format(REDIS_URL_TEMPLATE, urlAuthSection, redisPort)); + } + + @AfterEach + public void tearDown() throws Exception { + this.redisServer.stop(); + } + + @Test + void testAsyncStoreProvider() throws ClassNotFoundException { + testStoreProvider(redisMetadataReport, "1.0.0.redis.md.p1", 3000); + } + + @Test + void testSyncStoreProvider() throws ClassNotFoundException { + testStoreProvider(syncRedisMetadataReport, "1.0.0.redis.md.p2", 3); + } + + private void testStoreProvider(RedisMetadataReport redisMetadataReport, String version, long moreTime) + throws ClassNotFoundException { + String interfaceName = "org.apache.dubbo.metadata.store.redis.RedisMetadata4TstService"; + String group = null; + String application = "vic.redis.md"; + MetadataIdentifier providerMetadataIdentifier = + storePrivider(redisMetadataReport, interfaceName, version, group, application); + Jedis jedis = null; + try { + jedis = redisMetadataReport.pool.getResource(); + String keyTmp = providerMetadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY); + String value = jedis.get(keyTmp); + if (value == null) { + Thread.sleep(moreTime); + value = jedis.get(keyTmp); + } + + Assertions.assertNotNull(value); + + FullServiceDefinition fullServiceDefinition = JsonUtils.toJavaObject(value, FullServiceDefinition.class); + Assertions.assertEquals(fullServiceDefinition.getParameters().get("paramTest"), "redisTest"); + } catch (Throwable e) { + throw new RpcException("Failed to put to redis . 
cause: " + e.getMessage(), e); + } finally { + if (jedis != null) { + jedis.del(providerMetadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY)); + } + redisMetadataReport.pool.close(); + } + } + + @Test + void testAsyncStoreConsumer() throws ClassNotFoundException { + testStoreConsumer(redisMetadataReport, "1.0.0.redis.md.c1", 3000); + } + + @Test + void testSyncStoreConsumer() throws ClassNotFoundException { + testStoreConsumer(syncRedisMetadataReport, "1.0.0.redis.md.c2", 3); + } + + private void testStoreConsumer(RedisMetadataReport redisMetadataReport, String version, long moreTime) + throws ClassNotFoundException { + String interfaceName = "org.apache.dubbo.metadata.store.redis.RedisMetadata4TstService"; + String group = null; + String application = "vic.redis.md"; + MetadataIdentifier consumerMetadataIdentifier = + storeConsumer(redisMetadataReport, interfaceName, version, group, application); + Jedis jedis = null; + try { + jedis = redisMetadataReport.pool.getResource(); + String keyTmp = consumerMetadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY); + String value = jedis.get(keyTmp); + if (value == null) { + Thread.sleep(moreTime); + value = jedis.get(keyTmp); + } + Assertions.assertEquals(value, "{\"paramConsumerTest\":\"redisCm\"}"); + } catch (Throwable e) { + throw new RpcException("Failed to put to redis . cause: " + e.getMessage(), e); + } finally { + if (jedis != null) { + jedis.del(consumerMetadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY)); + } + redisMetadataReport.pool.close(); + } + } + + private MetadataIdentifier storePrivider( + RedisMetadataReport redisMetadataReport, + String interfaceName, + String version, + String group, + String application) + throws ClassNotFoundException { + URL url = URL.valueOf("xxx://" + NetUtils.getLocalAddress().getHostName() + ":4444/" + interfaceName + + "?paramTest=redisTest&version=" + version + "&application=" + application + + (group == null ? "" : "&group=" + group)); + + MetadataIdentifier providerMetadataIdentifier = + new MetadataIdentifier(interfaceName, version, group, PROVIDER_SIDE, application); + Class interfaceClass = Class.forName(interfaceName); + FullServiceDefinition fullServiceDefinition = + ServiceDefinitionBuilder.buildFullDefinition(interfaceClass, url.getParameters()); + + redisMetadataReport.storeProviderMetadata(providerMetadataIdentifier, fullServiceDefinition); + try { + Thread.sleep(300); + } catch (InterruptedException e) { + e.printStackTrace(); + } + return providerMetadataIdentifier; + } + + private MetadataIdentifier storeConsumer( + RedisMetadataReport redisMetadataReport, + String interfaceName, + String version, + String group, + String application) + throws ClassNotFoundException { + URL url = URL.valueOf("xxx://" + NetUtils.getLocalAddress().getHostName() + ":4444/" + interfaceName + + "?version=" + version + "&application=" + application + (group == null ? 
"" : "&group=" + group)); + + MetadataIdentifier consumerMetadataIdentifier = + new MetadataIdentifier(interfaceName, version, group, CONSUMER_SIDE, application); + Class interfaceClass = Class.forName(interfaceName); + + Map tmp = new HashMap<>(); + tmp.put("paramConsumerTest", "redisCm"); + redisMetadataReport.storeConsumerMetadata(consumerMetadataIdentifier, tmp); + try { + Thread.sleep(300); + } catch (InterruptedException e) { + e.printStackTrace(); + } + return consumerMetadataIdentifier; + } + + @Test + void testAuthRedisMetadata() throws ClassNotFoundException { + testStoreProvider(redisMetadataReport, "1.0.0.redis.md.p1", 3000); + } + + @Test + void testWrongAuthRedisMetadata() throws ClassNotFoundException { + redisMetadataReport = (RedisMetadataReport) new RedisMetadataReportFactory().createMetadataReport(registryUrl); + try { + testStoreProvider(redisMetadataReport, "1.0.0.redis.md.p1", 3000); + } catch (RpcException e) { + if (e.getCause() instanceof JedisConnectionException + && e.getCause().getCause() instanceof JedisDataException) { + Assertions.assertEquals( + "WRONGPASS invalid username-password pair or user is disabled.", + e.getCause().getCause().getMessage()); + } else { + Assertions.fail("no invalid password exception!"); + } + } + } + + @Test + void testRegisterServiceAppMapping() throws InterruptedException { + String serviceKey1 = "org.apache.dubbo.metadata.store.redis.RedisMetadata4TstService"; + String serviceKey2 = "org.apache.dubbo.metadata.store.redis.RedisMetadata4TstService2"; + + String appNames1 = "test1"; + String appNames2 = "test1,test2"; + CountDownLatch latch = new CountDownLatch(2); + CountDownLatch latch2 = new CountDownLatch(2); + + MappingListener mappingListener = new MappingListener() { + @Override + public void onEvent(MappingChangedEvent event) { + Set apps = event.getApps(); + if (apps.size() == 1) { + Assertions.assertTrue(apps.contains("test1")); + } else { + Assertions.assertTrue(apps.contains("test1")); + Assertions.assertTrue(apps.contains("test2")); + } + if (serviceKey1.equals(event.getServiceKey())) { + latch.countDown(); + } else if (serviceKey2.equals(event.getServiceKey())) { + latch2.countDown(); + } + } + + @Override + public void stop() {} + }; + + Set serviceAppMapping = + redisMetadataReport.getServiceAppMapping(serviceKey1, mappingListener, registryUrl); + + Assertions.assertTrue(serviceAppMapping.isEmpty()); + + ConfigItem configItem = redisMetadataReport.getConfigItem(serviceKey1, DEFAULT_MAPPING_GROUP); + + redisMetadataReport.registerServiceAppMapping( + serviceKey1, DEFAULT_MAPPING_GROUP, appNames1, configItem.getTicket()); + configItem = redisMetadataReport.getConfigItem(serviceKey1, DEFAULT_MAPPING_GROUP); + + redisMetadataReport.registerServiceAppMapping( + serviceKey1, DEFAULT_MAPPING_GROUP, appNames2, configItem.getTicket()); + + latch.await(); + + serviceAppMapping = redisMetadataReport.getServiceAppMapping(serviceKey2, mappingListener, registryUrl); + + Assertions.assertTrue(serviceAppMapping.isEmpty()); + + configItem = redisMetadataReport.getConfigItem(serviceKey2, DEFAULT_MAPPING_GROUP); + + redisMetadataReport.registerServiceAppMapping( + serviceKey2, DEFAULT_MAPPING_GROUP, appNames1, configItem.getTicket()); + configItem = redisMetadataReport.getConfigItem(serviceKey2, DEFAULT_MAPPING_GROUP); + redisMetadataReport.registerServiceAppMapping( + serviceKey2, DEFAULT_MAPPING_GROUP, appNames2, configItem.getTicket()); + + latch2.await(); + RedisMetadataReport.MappingDataListener mappingDataListener = 
redisMetadataReport.getMappingDataListener(); + Assertions.assertTrue(mappingDataListener.running); + Assertions.assertTrue(!mappingDataListener.getNotifySub().isEmpty()); + + redisMetadataReport.removeServiceAppMappingListener(serviceKey1, mappingListener); + Assertions.assertTrue(mappingDataListener.running); + Assertions.assertTrue(!mappingDataListener.getNotifySub().isEmpty()); + redisMetadataReport.removeServiceAppMappingListener(serviceKey2, mappingListener); + Assertions.assertTrue(!mappingDataListener.running); + Assertions.assertTrue(mappingDataListener.getNotifySub().isEmpty()); + } + + @Test + void testAppMetadata() { + String serviceKey = "org.apache.dubbo.metadata.store.redis.RedisMetadata4TstService"; + String appName = "demo"; + URL url = URL.valueOf("test://127.0.0.1:8888/" + serviceKey); + + MetadataInfo metadataInfo = new MetadataInfo(appName); + metadataInfo.addService(url); + SubscriberMetadataIdentifier identifier = + new SubscriberMetadataIdentifier(appName, metadataInfo.calAndGetRevision()); + MetadataInfo appMetadata = redisMetadataReport.getAppMetadata(identifier, Collections.emptyMap()); + Assertions.assertNull(appMetadata); + + redisMetadataReport.publishAppMetadata(identifier, metadataInfo); + appMetadata = redisMetadataReport.getAppMetadata(identifier, Collections.emptyMap()); + Assertions.assertNotNull(appMetadata); + Assertions.assertEquals(appMetadata.toFullString(), metadataInfo.toFullString()); + redisMetadataReport.unPublishAppMetadata(identifier, metadataInfo); + appMetadata = redisMetadataReport.getAppMetadata(identifier, Collections.emptyMap()); + Assertions.assertNull(appMetadata); + } +} diff --git a/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/test/resources/log4j2-test.xml b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/test/resources/log4j2-test.xml new file mode 100644 index 000000000..14fdc8c51 --- /dev/null +++ b/dubbo-metadata-report-extensions/dubbo-metadata-report-redis/src/test/resources/log4j2-test.xml @@ -0,0 +1,28 @@ + + + + + + + + + + + + + diff --git a/dubbo-metadata-report-extensions/pom.xml b/dubbo-metadata-report-extensions/pom.xml index 2414b4262..1c458b26e 100644 --- a/dubbo-metadata-report-extensions/pom.xml +++ b/dubbo-metadata-report-extensions/pom.xml @@ -34,5 +34,6 @@ dubbo-metadata-report-consul dubbo-metadata-report-etcd dubbo-metadata-rest + dubbo-metadata-report-redis diff --git a/dubbo-registry-extensions/dubbo-registry-redis/src/test/java/org/apache/dubbo/registry/redis/RedisRegistryTest.java b/dubbo-registry-extensions/dubbo-registry-redis/src/test/java/org/apache/dubbo/registry/redis/RedisRegistryTest.java index 45ac89dad..78aff0282 100644 --- a/dubbo-registry-extensions/dubbo-registry-redis/src/test/java/org/apache/dubbo/registry/redis/RedisRegistryTest.java +++ b/dubbo-registry-extensions/dubbo-registry-redis/src/test/java/org/apache/dubbo/registry/redis/RedisRegistryTest.java @@ -28,7 +28,6 @@ import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import redis.clients.jedis.exceptions.JedisConnectionException; -import redis.clients.jedis.exceptions.JedisExhaustedPoolException; import redis.embedded.RedisServer; import java.io.IOException; @@ -242,7 +241,7 @@ public void testAvailableWithBackup() { Registry registry = new RedisRegistryFactory().createRegistry(url); Registry finalRegistry = registry; - assertThrows(JedisExhaustedPoolException.class, () -> finalRegistry.isAvailable()); + assertThrows(JedisConnectionException.class, () -> 
finalRegistry.isAvailable()); url = URL.valueOf(this.registryUrl.toFullString()).addParameter(BACKUP_KEY, "redisTwo:8881"); registry = new RedisRegistryFactory().createRegistry(url); diff --git a/dubbo-remoting-extensions/dubbo-remoting-redis/src/main/java/org/apache/dubbo/remoting/redis/jedis/ClusterRedisClient.java b/dubbo-remoting-extensions/dubbo-remoting-redis/src/main/java/org/apache/dubbo/remoting/redis/jedis/ClusterRedisClient.java index 87c48ee2d..fc9483cee 100644 --- a/dubbo-remoting-extensions/dubbo-remoting-redis/src/main/java/org/apache/dubbo/remoting/redis/jedis/ClusterRedisClient.java +++ b/dubbo-remoting-extensions/dubbo-remoting-redis/src/main/java/org/apache/dubbo/remoting/redis/jedis/ClusterRedisClient.java @@ -19,17 +19,21 @@ import org.apache.dubbo.common.URL; import org.apache.dubbo.common.logger.Logger; import org.apache.dubbo.common.logger.LoggerFactory; +import org.apache.dubbo.common.utils.CollectionUtils; import org.apache.dubbo.common.utils.StringUtils; import org.apache.dubbo.remoting.redis.RedisClient; import org.apache.dubbo.remoting.redis.support.AbstractRedisClient; +import redis.clients.jedis.Connection; +import redis.clients.jedis.ConnectionPool; import redis.clients.jedis.HostAndPort; -import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisCluster; -import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPubSub; +import redis.clients.jedis.params.ScanParams; +import redis.clients.jedis.resps.ScanResult; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Pattern; @@ -37,6 +41,7 @@ import static org.apache.dubbo.common.constants.CommonConstants.COMMA_SPLIT_PATTERN; public class ClusterRedisClient extends AbstractRedisClient implements RedisClient { + private static final Logger logger = LoggerFactory.getLogger(ClusterRedisClient.class); private static final int DEFAULT_TIMEOUT = 2000; @@ -46,6 +51,7 @@ public class ClusterRedisClient extends AbstractRedisClient implements RedisClie private static final int DEFAULT_MAX_ATTEMPTS = 5; private final JedisCluster jedisCluster; + private Pattern COLON_SPLIT_PATTERN = Pattern.compile("\\s*[:]+\\s*"); public ClusterRedisClient(URL url) { @@ -56,7 +62,7 @@ public ClusterRedisClient(URL url) { } jedisCluster = new JedisCluster(nodes, url.getParameter("connection.timeout", DEFAULT_TIMEOUT), url.getParameter("so.timeout", DEFAULT_SO_TIMEOUT), url.getParameter("max.attempts", DEFAULT_MAX_ATTEMPTS), - url.getPassword(), getConfig()); + url.getPassword(), getPoolConfig()); } @Override @@ -71,9 +77,9 @@ public Long publish(String channel, String message) { @Override public boolean isConnected() { - Map poolMap = jedisCluster.getClusterNodes(); - for (JedisPool jedisPool : poolMap.values()) { - Jedis jedis = jedisPool.getResource(); + Map poolMap = jedisCluster.getClusterNodes(); + for (ConnectionPool jedisPool : poolMap.values()) { + Connection jedis = jedisPool.getResource(); if (jedis.isConnected()) { jedisPool.returnResource(jedis); return true; @@ -96,12 +102,22 @@ public Long hdel(String key, String... 
fields) { @Override public Set scan(String pattern) { - Map nodes = jedisCluster.getClusterNodes(); + + Map nodes = jedisCluster.getClusterNodes(); Set result = new HashSet<>(); - for (JedisPool jedisPool : nodes.values()) { - Jedis jedis = jedisPool.getResource(); - result.addAll(scan(jedis, pattern)); - jedisPool.returnResource(jedis); + String cursor = ScanParams.SCAN_POINTER_START; + ScanParams params = new ScanParams(); + params.match(pattern); + while (true) { + ScanResult scanResult = jedisCluster.scan(cursor, params); + List list = scanResult.getResult(); + if (CollectionUtils.isNotEmpty(list)) { + result.addAll(list); + } + if (ScanParams.SCAN_POINTER_START.equals(scanResult.getCursor())) { + break; + } + cursor = scanResult.getCursor(); } return result; } @@ -131,11 +147,9 @@ private Set getNodes(URL url) { hostAndPorts.add(new HostAndPort(url.getHost(), url.getPort())); String backupAddresses = url.getBackupAddress(6379); String[] nodes = StringUtils.isEmpty(backupAddresses) ? new String[0] : COMMA_SPLIT_PATTERN.split(backupAddresses); - if (nodes.length > 0) { - for (String node : nodes) { - String[] hostAndPort = COLON_SPLIT_PATTERN.split(node); - hostAndPorts.add(new HostAndPort(hostAndPort[0], Integer.valueOf(hostAndPort[1]))); - } + for (String node : nodes) { + String[] hostAndPort = COLON_SPLIT_PATTERN.split(node); + hostAndPorts.add(new HostAndPort(hostAndPort[0], Integer.parseInt(hostAndPort[1]))); } return hostAndPorts; } diff --git a/dubbo-remoting-extensions/dubbo-remoting-redis/src/main/java/org/apache/dubbo/remoting/redis/support/AbstractRedisClient.java b/dubbo-remoting-extensions/dubbo-remoting-redis/src/main/java/org/apache/dubbo/remoting/redis/support/AbstractRedisClient.java index 4af640ebf..cb13f0225 100644 --- a/dubbo-remoting-extensions/dubbo-remoting-redis/src/main/java/org/apache/dubbo/remoting/redis/support/AbstractRedisClient.java +++ b/dubbo-remoting-extensions/dubbo-remoting-redis/src/main/java/org/apache/dubbo/remoting/redis/support/AbstractRedisClient.java @@ -16,53 +16,74 @@ */ package org.apache.dubbo.remoting.redis.support; +import org.apache.commons.pool2.impl.GenericObjectPoolConfig; import org.apache.dubbo.common.URL; import org.apache.dubbo.common.utils.CollectionUtils; import org.apache.dubbo.remoting.redis.RedisClient; +import redis.clients.jedis.Connection; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisPoolConfig; -import redis.clients.jedis.ScanParams; -import redis.clients.jedis.ScanResult; +import redis.clients.jedis.params.ScanParams; +import redis.clients.jedis.resps.ScanResult; +import java.time.Duration; import java.util.HashSet; import java.util.List; import java.util.Set; public abstract class AbstractRedisClient implements RedisClient { - private URL url; - private JedisPoolConfig config; + private final URL url; + + private final JedisPoolConfig config; + + private GenericObjectPoolConfig poolConfig; public AbstractRedisClient(URL url) { this.url = url; config = new JedisPoolConfig(); + poolConfig = new GenericObjectPoolConfig<>(); config.setTestOnBorrow(url.getParameter("test.on.borrow", true)); + poolConfig.setTestOnBorrow(url.getParameter("test.on.borrow", true)); config.setTestOnReturn(url.getParameter("test.on.return", false)); + poolConfig.setTestOnReturn(url.getParameter("test.on.return", false)); config.setTestWhileIdle(url.getParameter("test.while.idle", false)); + poolConfig.setTestWhileIdle(url.getParameter("test.while.idle", false)); if (url.getParameter("max.idle", 0) > 0) {
config.setMaxIdle(url.getParameter("max.idle", 0)); + poolConfig.setMaxIdle(url.getParameter("max.idle", 0)); } if (url.getParameter("min.idle", 0) > 0) { config.setMinIdle(url.getParameter("min.idle", 0)); + poolConfig.setMinIdle(url.getParameter("min.idle", 0)); } if (url.getParameter("max.active", 0) > 0) { config.setMaxTotal(url.getParameter("max.active", 0)); + poolConfig.setMaxTotal(url.getParameter("max.active", 0)); } if (url.getParameter("max.total", 0) > 0) { config.setMaxTotal(url.getParameter("max.total", 0)); + poolConfig.setMaxTotal(url.getParameter("max.total", 0)); } if (url.getParameter("max.wait", url.getParameter("timeout", 0)) > 0) { - config.setMaxWaitMillis(url.getParameter("max.wait", url.getParameter("timeout", 0))); + Duration maxWaitMillis = Duration.ofMillis(url.getParameter("max.wait", url.getParameter("timeout", 0))); + config.setMaxWait(maxWaitMillis); + poolConfig.setMaxWait(maxWaitMillis); } if (url.getParameter("num.tests.per.eviction.run", 0) > 0) { config.setNumTestsPerEvictionRun(url.getParameter("num.tests.per.eviction.run", 0)); + poolConfig.setNumTestsPerEvictionRun(url.getParameter("num.tests.per.eviction.run", 0)); } if (url.getParameter("time.between.eviction.runs.millis", 0) > 0) { - config.setTimeBetweenEvictionRunsMillis(url.getParameter("time.between.eviction.runs.millis", 0)); + Duration timeBetweenEvictionRunsMillis = Duration.ofMillis(url.getParameter("time.between.eviction.runs.millis", 0)); + config.setTimeBetweenEvictionRuns(timeBetweenEvictionRunsMillis); + poolConfig.setTimeBetweenEvictionRuns(timeBetweenEvictionRunsMillis); } if (url.getParameter("min.evictable.idle.time.millis", 0) > 0) { - config.setMinEvictableIdleTimeMillis(url.getParameter("min.evictable.idle.time.millis", 0)); + Duration minEvictableIdleTimeMillis = Duration.ofMillis(url.getParameter("min.evictable.idle.time.millis", 0)); + config.setMinEvictableIdleTime(minEvictableIdleTimeMillis); + poolConfig.setMinEvictableIdleTime(minEvictableIdleTimeMillis); } } @@ -92,4 +113,8 @@ public URL getUrl() { public JedisPoolConfig getConfig() { return config; } + + public GenericObjectPoolConfig getPoolConfig() { + return poolConfig; + } } diff --git a/dubbo-serialization-extensions/dubbo-serialization-avro/src/test/java/org/apache/dubbo/common/serialize/avro/AvroObjectInputOutputTest.java b/dubbo-serialization-extensions/dubbo-serialization-avro/src/test/java/org/apache/dubbo/common/serialize/avro/AvroObjectInputOutputTest.java index 0ef1f3a96..5ede1fcd8 100644 --- a/dubbo-serialization-extensions/dubbo-serialization-avro/src/test/java/org/apache/dubbo/common/serialize/avro/AvroObjectInputOutputTest.java +++ b/dubbo-serialization-extensions/dubbo-serialization-avro/src/test/java/org/apache/dubbo/common/serialize/avro/AvroObjectInputOutputTest.java @@ -186,7 +186,7 @@ public void testWriteReadObjectWithoutClass() throws IOException, ClassNotFoundE avroObjectOutput.flushBuffer(); pos.close(); - //这里会丢失所有信息 + //All the information is lost here Object result = avroObjectInput.readObject(); assertThat(result, not(nullValue())); diff --git a/dubbo-serialization-extensions/dubbo-serialization-fastjson/src/test/java/org/apache/dubbo/common/serialize/fastjson/FastJsonObjectInputOutputTest.java b/dubbo-serialization-extensions/dubbo-serialization-fastjson/src/test/java/org/apache/dubbo/common/serialize/fastjson/FastJsonObjectInputOutputTest.java new file mode 100644 index 000000000..781e3a2e6 --- /dev/null +++
b/dubbo-serialization-extensions/dubbo-serialization-fastjson/src/test/java/org/apache/dubbo/common/serialize/fastjson/FastJsonObjectInputOutputTest.java @@ -0,0 +1,203 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dubbo.common.serialize.fastjson; + +import com.example.test.TestPojo; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Assertions; +import java.io.IOException; +import java.io.PipedInputStream; +import java.io.PipedOutputStream; + +public class FastJsonObjectInputOutputTest { + private FastJsonObjectOutput fastJsonObjectOutput; + private FastJsonObjectInput fastJsonObjectInput; + + private PipedOutputStream pos; + private PipedInputStream pis; + + + /** + * Sets up the test environment by initializing PipedInputStream, + * PipedOutputStream, and connecting them before each test. + * + * @throws IOException if there's an issue initializing the streams. + */ + @BeforeEach + public void setup() throws IOException { + pis = new PipedInputStream(); + pos = new PipedOutputStream(); + pis.connect(pos); + + fastJsonObjectOutput = new FastJsonObjectOutput(pos); + fastJsonObjectInput = new FastJsonObjectInput(pis); + } + + /** + * Cleans up the test environment by closing PipedInputStream and + * PipedOutputStream after each test. + * + * @throws IOException if there's an issue closing the streams. 
+ */ + @AfterEach + public void clean() throws IOException { + if (pos != null) { + pos.close(); + } + if (pis != null) { + pis.close(); + } + } + + @Test + public void testWriteReadString() throws IOException { + String testData = "Test String"; + fastJsonObjectOutput.writeUTF(testData); + fastJsonObjectOutput.flushBuffer(); + pos.close(); + + String result = fastJsonObjectInput.readObject(String.class); + Assertions.assertEquals(testData, result); + } + + @Test + public void testWriteReadBool() throws IOException { + fastJsonObjectOutput.writeBool(true); + fastJsonObjectOutput.flushBuffer(); + pos.close(); + + boolean result = fastJsonObjectInput.readObject(Boolean.class); + Assertions.assertTrue(result); + } + + @Test + public void testWriteReadByte() throws IOException { + byte testData = 42; + fastJsonObjectOutput.writeByte(testData); + fastJsonObjectOutput.flushBuffer(); + pos.close(); + + Byte result = fastJsonObjectInput.readObject(Byte.class); + Assertions.assertEquals(testData, result); + } + + @Test + public void testWriteReadBytes() throws IOException { + byte[] testData = "example".getBytes(); + fastJsonObjectOutput.writeBytes(testData); + fastJsonObjectOutput.flushBuffer(); + pos.close(); + + byte[] result = fastJsonObjectInput.readBytes(); + Assertions.assertArrayEquals(testData, result); + } + + @Test + public void testWriteReadShort() throws IOException { + short testData = 32767; + fastJsonObjectOutput.writeShort(testData); + fastJsonObjectOutput.flushBuffer(); + pos.close(); + + Short result = fastJsonObjectInput.readObject(Short.class); + Assertions.assertEquals(testData, result); + } + + @Test + public void testWriteReadInt() throws IOException { + int testData = 123456; + fastJsonObjectOutput.writeInt(testData); + fastJsonObjectOutput.flushBuffer(); + pos.close(); + + Integer result = fastJsonObjectInput.readObject(Integer.class); + Assertions.assertEquals(testData, result); + } + + @Test + public void testWriteReadLong() throws IOException { + long testData = 123456789L; + fastJsonObjectOutput.writeLong(testData); + fastJsonObjectOutput.flushBuffer(); + pos.close(); + + Long result = fastJsonObjectInput.readObject(Long.class); + Assertions.assertEquals(testData, result); + } + + @Test + public void testWriteReadFloat() throws IOException { + float testData = 3.14f; + fastJsonObjectOutput.writeFloat(testData); + fastJsonObjectOutput.flushBuffer(); + pos.close(); + + Float result = fastJsonObjectInput.readObject(Float.class); + Assertions.assertEquals(testData, result, 0.0001f); + } + + @Test + public void testWriteReadDouble() throws IOException { + double testData = 3.14159; + fastJsonObjectOutput.writeDouble(testData); + fastJsonObjectOutput.flushBuffer(); + pos.close(); + + Double result = fastJsonObjectInput.readObject(Double.class); + Assertions.assertEquals(testData, result, 0.000001); + } + + @Test + public void testWriteReadUTF() throws IOException { + String testData = "Hello, World!"; + fastJsonObjectOutput.writeUTF(testData); + fastJsonObjectOutput.flushBuffer(); + pos.close(); + + String result = fastJsonObjectInput.readObject(String.class); + Assertions.assertEquals(testData, result); + } + + @Test + public void testWriteReadObject() throws IOException { + TestPojo testData = new TestPojo("Alice"); + fastJsonObjectOutput.writeObject(testData); + fastJsonObjectOutput.flushBuffer(); + pos.close(); + + TestPojo result = fastJsonObjectInput.readObject(TestPojo.class); + Assertions.assertNotNull(result); + Assertions.assertEquals(testData.getData(), 
result.getData()); + } + + @Test + public void testWriteReadObjectWithoutClass() throws IOException { + + TestPojo testData = new TestPojo("Bob"); + fastJsonObjectOutput.writeObject(testData); + fastJsonObjectOutput.flushBuffer(); + pos.close(); + + Object result = fastJsonObjectInput.readObject(); + Assertions.assertNotNull(result); + + Assertions.assertEquals("Bob", ((TestPojo) result).getData()); + } +} + diff --git a/dubbo-serialization-extensions/dubbo-serialization-fury/src/main/java/org/apache/dubbo/common/serialize/fury/dubbo/BaseFurySerialization.java b/dubbo-serialization-extensions/dubbo-serialization-fury/src/main/java/org/apache/dubbo/common/serialize/fury/dubbo/BaseFurySerialization.java index 9fd0e5317..61e167322 100644 --- a/dubbo-serialization-extensions/dubbo-serialization-fury/src/main/java/org/apache/dubbo/common/serialize/fury/dubbo/BaseFurySerialization.java +++ b/dubbo-serialization-extensions/dubbo-serialization-fury/src/main/java/org/apache/dubbo/common/serialize/fury/dubbo/BaseFurySerialization.java @@ -32,7 +32,6 @@ import java.io.InputStream; import java.io.OutputStream; import java.util.Optional; - /** * Fury serialization framework integration with dubbo. * diff --git a/dubbo-serialization-extensions/dubbo-serialization-fury/src/test/java/org/apache/dubbo/common/serialize/fury/FuryObjectInputTest.java b/dubbo-serialization-extensions/dubbo-serialization-fury/src/test/java/org/apache/dubbo/common/serialize/fury/FuryObjectInputTest.java index aaa23426e..3ff0c5b92 100644 --- a/dubbo-serialization-extensions/dubbo-serialization-fury/src/test/java/org/apache/dubbo/common/serialize/fury/FuryObjectInputTest.java +++ b/dubbo-serialization-extensions/dubbo-serialization-fury/src/test/java/org/apache/dubbo/common/serialize/fury/FuryObjectInputTest.java @@ -105,5 +105,4 @@ public void testReadBytes() throws Exception { byte[] result = objectInput.readBytes(); assertArrayEquals(new byte[]{10, 20, 30, 40}, result); } - } diff --git a/dubbo-serialization-extensions/dubbo-serialization-kryo/src/test/java/org/apache/dubbo/common/serialize/kryo/KryObjectInputOutputTest.java b/dubbo-serialization-extensions/dubbo-serialization-kryo/src/test/java/org/apache/dubbo/common/serialize/kryo/KryObjectInputOutputTest.java new file mode 100644 index 000000000..c11b52b81 --- /dev/null +++ b/dubbo-serialization-extensions/dubbo-serialization-kryo/src/test/java/org/apache/dubbo/common/serialize/kryo/KryObjectInputOutputTest.java @@ -0,0 +1,197 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dubbo.common.serialize.kryo; + + +import org.apache.dubbo.common.serialize.model.Person; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.io.PipedInputStream; +import java.io.PipedOutputStream; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsNull.nullValue; + + +public class KryObjectInputOutputTest { + private KryoObjectInput kryoObjectInput; + private KryoObjectOutput kryoObjectOutput; + + private PipedOutputStream pos; + private PipedInputStream pis; + + @BeforeEach + public void setup() throws IOException { + pis = new PipedInputStream(); + pos = new PipedOutputStream(); + pis.connect(pos); + + kryoObjectOutput = new KryoObjectOutput(pos); + kryoObjectInput = new KryoObjectInput(pis); + } + + @AfterEach + public void clean() throws IOException { + if (pos != null) { + pos.close(); + pos = null; + } + if (pis != null) { + pis.close(); + pis = null; + } + } + + @Test + public void testWriteReadBool() throws IOException, InterruptedException { + kryoObjectOutput.writeBool(true); + kryoObjectOutput.flushBuffer(); + pos.close(); + + boolean result = kryoObjectInput.readBool(); + assertThat(result, is(true)); + } + + @Test + public void testWriteReadByte() throws IOException { + kryoObjectOutput.writeByte((byte) 'a'); + kryoObjectOutput.flushBuffer(); + pos.close(); + + Byte result = kryoObjectInput.readByte(); + + assertThat(result, is((byte) 'a')); + } + + @Test + public void testWriteReadBytes() throws IOException { + kryoObjectOutput.writeBytes("123456".getBytes()); + kryoObjectOutput.flushBuffer(); + pos.close(); + + byte[] result = kryoObjectInput.readBytes(); + + assertThat(result, is("123456".getBytes())); + } + + @Test + public void testWriteReadShort() throws IOException { + kryoObjectOutput.writeShort((short) 1); + kryoObjectOutput.flushBuffer(); + pos.close(); + + short result = kryoObjectInput.readShort(); + + assertThat(result, is((short) 1)); + } + + @Test + public void testWriteReadInt() throws IOException { + kryoObjectOutput.writeInt(1); + kryoObjectOutput.flushBuffer(); + pos.close(); + + Integer result = kryoObjectInput.readInt(); + + assertThat(result, is(1)); + } + + @Test + public void testReadDouble() throws IOException { + kryoObjectOutput.writeDouble(3.14d); + kryoObjectOutput.flushBuffer(); + pos.close(); + + Double result = kryoObjectInput.readDouble(); + + assertThat(result, is(3.14d)); + } + + @Test + public void testReadLong() throws IOException { + kryoObjectOutput.writeLong(10L); + kryoObjectOutput.flushBuffer(); + pos.close(); + + Long result = kryoObjectInput.readLong(); + + assertThat(result, is(10L)); + } + + @Test + public void testWriteReadFloat() throws IOException { + kryoObjectOutput.writeFloat(1.66f); + kryoObjectOutput.flushBuffer(); + pos.close(); + + Float result = kryoObjectInput.readFloat(); + + assertThat(result, is(1.66F)); + } + + @Test + public void testWriteReadUTF() throws IOException { + kryoObjectOutput.writeUTF("wording"); + kryoObjectOutput.flushBuffer(); + pos.close(); + + String result = kryoObjectInput.readUTF(); + + assertThat(result, is("wording")); + } + + @Test + public void testWriteReadObject() throws IOException, ClassNotFoundException { + Person p = new Person(); + p.setAge(30); + p.setName("abc"); + + kryoObjectOutput.writeObject(p); + 
kryoObjectOutput.flushBuffer(); + pos.close(); + + Person result = kryoObjectInput.readObject(Person.class); + + assertThat(result, not(nullValue())); + assertThat(result.getName(), is("abc")); + assertThat(result.getAge(), is(30)); + } + + @Test + public void testWriteReadObjectWithoutClass() throws IOException, ClassNotFoundException { + Person p = new Person(); + p.setAge(30); + p.setName("abc"); + + kryoObjectOutput.writeObject(p); + kryoObjectOutput.flushBuffer(); + pos.close(); + + //All the information is lost here + Object result = kryoObjectInput.readObject(); + + assertThat(result, not(nullValue())); +// assertThat(result.getName(), is("abc")); +// assertThat(result.getAge(), is(30)); + } +} diff --git a/dubbo-serialization-extensions/dubbo-serialization-msgpack/src/test/java/org/apache/dubbo/common/serialize/msgpack/MsgpackSerializationTest.java b/dubbo-serialization-extensions/dubbo-serialization-msgpack/src/test/java/org/apache/dubbo/common/serialize/msgpack/MsgpackSerializationTest.java new file mode 100644 index 000000000..e081c2cb8 --- /dev/null +++ b/dubbo-serialization-extensions/dubbo-serialization-msgpack/src/test/java/org/apache/dubbo/common/serialize/msgpack/MsgpackSerializationTest.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dubbo.common.serialize.msgpack; + +import org.apache.dubbo.common.serialize.ObjectInput; +import org.apache.dubbo.common.serialize.ObjectOutput; + +import org.hamcrest.Matchers; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class MsgpackSerializationTest { + private MsgpackSerialization msgpackSerialization; + + @BeforeEach + public void setUp() { + this.msgpackSerialization = new MsgpackSerialization(); + } + + @Test + public void testContentType() { + assertThat(msgpackSerialization.getContentType(), is("text/json")); + } + + @Test + public void testContentTypeId() { + byte result = 27; + assertThat(msgpackSerialization.getContentTypeId(), is(result)); + } + + @Test + public void testObjectOutput() throws IOException { + ObjectOutput objectOutput = msgpackSerialization.serialize(null, mock(OutputStream.class)); + assertThat(objectOutput, Matchers.instanceOf(MsgpackObjectOutput.class)); + } + + @Test + public void testObjectInput() throws IOException { + ObjectInput objectInput = msgpackSerialization.deserialize(null, mock(InputStream.class)); + assertThat(objectInput, Matchers.instanceOf(MsgpackObjectInput.class)); + } +} diff --git a/dubbo-serialization-extensions/dubbo-serialization-native-hessian/src/test/java/org/apache/dubbo/serialize/hessian/NativeHessianObjectInputOutputTest.java b/dubbo-serialization-extensions/dubbo-serialization-native-hessian/src/test/java/org/apache/dubbo/serialize/hessian/NativeHessianObjectInputOutputTest.java new file mode 100644 index 000000000..a76b16fdf --- /dev/null +++ b/dubbo-serialization-extensions/dubbo-serialization-native-hessian/src/test/java/org/apache/dubbo/serialize/hessian/NativeHessianObjectInputOutputTest.java @@ -0,0 +1,195 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dubbo.serialize.hessian; + + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.io.PipedInputStream; +import java.io.PipedOutputStream; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsNot.not; +import static org.hamcrest.core.IsNull.nullValue; + + +public class NativeHessianObjectInputOutputTest { + private Hessian2ObjectInput hessian2ObjectInput; + private Hessian2ObjectOutput hessian2ObjectOutput; + + private PipedOutputStream pos; + private PipedInputStream pis; + + @BeforeEach + public void setup() throws IOException { + pis = new PipedInputStream(); + pos = new PipedOutputStream(); + pis.connect(pos); + + hessian2ObjectOutput = new Hessian2ObjectOutput(pos); + hessian2ObjectInput = new Hessian2ObjectInput(pis); + } + + @AfterEach + public void clean() throws IOException { + if (pos != null) { + pos.close(); + pos = null; + } + if (pis != null) { + pis.close(); + pis = null; + } + } + + @Test + public void testWriteReadBool() throws IOException, InterruptedException { + hessian2ObjectOutput.writeBool(true); + hessian2ObjectOutput.flushBuffer(); + pos.close(); + + boolean result = hessian2ObjectInput.readBool(); + assertThat(result, is(true)); + } + + @Test + public void testWriteReadByte() throws IOException { + hessian2ObjectOutput.writeByte((byte) 'a'); + hessian2ObjectOutput.flushBuffer(); + pos.close(); + + Byte result = hessian2ObjectInput.readByte(); + + assertThat(result, is((byte) 'a')); + } + + @Test + public void testWriteReadBytes() throws IOException { + hessian2ObjectOutput.writeBytes("123456".getBytes()); + hessian2ObjectOutput.flushBuffer(); + pos.close(); + + byte[] result = hessian2ObjectInput.readBytes(); + + assertThat(result, is("123456".getBytes())); + } + + @Test + public void testWriteReadShort() throws IOException { + hessian2ObjectOutput.writeShort((short) 1); + hessian2ObjectOutput.flushBuffer(); + pos.close(); + + short result = hessian2ObjectInput.readShort(); + + assertThat(result, is((short) 1)); + } + + @Test + public void testWriteReadInt() throws IOException { + hessian2ObjectOutput.writeInt(1); + hessian2ObjectOutput.flushBuffer(); + pos.close(); + + Integer result = hessian2ObjectInput.readInt(); + + assertThat(result, is(1)); + } + + @Test + public void testReadDouble() throws IOException { + hessian2ObjectOutput.writeDouble(3.14d); + hessian2ObjectOutput.flushBuffer(); + pos.close(); + + Double result = hessian2ObjectInput.readDouble(); + + assertThat(result, is(3.14d)); + } + + @Test + public void testReadLong() throws IOException { + hessian2ObjectOutput.writeLong(10L); + hessian2ObjectOutput.flushBuffer(); + pos.close(); + + Long result = hessian2ObjectInput.readLong(); + + assertThat(result, is(10L)); + } + + @Test + public void testWriteReadFloat() throws IOException { + hessian2ObjectOutput.writeFloat(1.66f); + hessian2ObjectOutput.flushBuffer(); + pos.close(); + + Float result = hessian2ObjectInput.readFloat(); + + assertThat(result, is(1.66F)); + } + + @Test + public void testWriteReadUTF() throws IOException { + hessian2ObjectOutput.writeUTF("wording"); + hessian2ObjectOutput.flushBuffer(); + pos.close(); + + String result = hessian2ObjectInput.readUTF(); + + assertThat(result, is("wording")); + } + + @Test + public void testWriteReadObject() throws IOException, ClassNotFoundException { + Person p = new Person(); + 
p.setAge(30); + p.setName("abc"); + + hessian2ObjectOutput.writeObject(p); + hessian2ObjectOutput.flushBuffer(); + pos.close(); + + Person result = hessian2ObjectInput.readObject(Person.class); + + assertThat(result, not(nullValue())); + assertThat(result.getName(), is("abc")); + assertThat(result.getAge(), is(30)); + } + + @Test + public void testWriteReadObjectWithoutClass() throws IOException, ClassNotFoundException { + Person p = new Person(); + p.setAge(30); + p.setName("abc"); + + hessian2ObjectOutput.writeObject(p); + hessian2ObjectOutput.flushBuffer(); + pos.close(); + + //All the information is lost here + Object result = hessian2ObjectInput.readObject(); + + assertThat(result, not(nullValue())); +// assertThat(result.getName(), is("abc")); +// assertThat(result.getAge(), is(30)); + } +} diff --git a/dubbo-serialization-extensions/dubbo-serialization-native-hessian/src/test/java/org/apache/dubbo/serialize/hessian/Person.java b/dubbo-serialization-extensions/dubbo-serialization-native-hessian/src/test/java/org/apache/dubbo/serialize/hessian/Person.java new file mode 100644 index 000000000..ea6664528 --- /dev/null +++ b/dubbo-serialization-extensions/dubbo-serialization-native-hessian/src/test/java/org/apache/dubbo/serialize/hessian/Person.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dubbo.serialize.hessian; + +import java.io.Serializable; +import java.util.Arrays; + +public class Person implements Serializable { + byte oneByte = 123; + private String name = "name1"; + private int age = 11; + + private String[] value = {"value1", "value2"}; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public byte getOneByte() { + return oneByte; + } + + public void setOneByte(byte b) { + this.oneByte = b; + } + + public int getAge() { + return age; + } + + public void setAge(int age) { + this.age = age; + } + + public String[] getValue() { + return value; + } + + public void setValue(String[] value) { + this.value = value; + } + + @Override + public String toString() { + return String.format("Person name(%s) age(%d) byte(%s) [value=%s]", name, age, oneByte, Arrays.toString(value)); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + age; + result = prime * result + ((name == null) ? 
0 : name.hashCode()); + result = prime * result + Arrays.hashCode(value); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Person other = (Person) obj; + if (age != other.age) + return false; + if (name == null) { + if (other.name != null) + return false; + } else if (!name.equals(other.name)) + return false; + if (!Arrays.equals(value, other.value)) + return false; + return true; + } +} diff --git a/dubbo-serialization-extensions/dubbo-serialization-protobuf/src/main/java/org/apache/dubbo/common/serialize/protobuf/support/GenericProtobufSerialization.java b/dubbo-serialization-extensions/dubbo-serialization-protobuf/src/main/java/org/apache/dubbo/common/serialize/protobuf/support/GenericProtobufSerialization.java index 12d2f658a..53b69a0e0 100644 --- a/dubbo-serialization-extensions/dubbo-serialization-protobuf/src/main/java/org/apache/dubbo/common/serialize/protobuf/support/GenericProtobufSerialization.java +++ b/dubbo-serialization-extensions/dubbo-serialization-protobuf/src/main/java/org/apache/dubbo/common/serialize/protobuf/support/GenericProtobufSerialization.java @@ -17,7 +17,6 @@ package org.apache.dubbo.common.serialize.protobuf.support; import org.apache.dubbo.common.URL; -import org.apache.dubbo.common.serialize.Constants; import org.apache.dubbo.common.serialize.ObjectInput; import org.apache.dubbo.common.serialize.ObjectOutput; import org.apache.dubbo.common.serialize.Serialization; @@ -25,6 +24,8 @@ import java.io.InputStream; import java.io.OutputStream; +import static org.apache.dubbo.common.serialize.Constants.PROTOBUF_SERIALIZATION_ID; + /** *

* Currently, the Dubbo protocol / framework data, such as attachments, event data, etc., @@ -43,7 +44,7 @@ public class GenericProtobufSerialization implements Serialization { @Override public byte getContentTypeId() { - return Constants.PROTOBUF_SERIALIZATION_ID; + return PROTOBUF_SERIALIZATION_ID; } @Override diff --git a/pom.xml b/pom.xml index e87ab7819..d9309e90f 100644 --- a/pom.xml +++ b/pom.xml @@ -315,6 +315,7 @@ true once + ${argline} ${jacocoArgLine}
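Note on the surefire change above: the argLine now interpolates ${argline} together with ${jacocoArgLine}, and the latter is assumed to be populated elsewhere in the build by the jacoco-maven-plugin prepare-agent goal. A minimal sketch of that wiring (the plugin version and execution id are illustrative assumptions, not taken from this diff):
<!-- Hypothetical sketch: exposes the JaCoCo agent JVM argument as ${jacocoArgLine}
     so it can be appended to surefire's argLine alongside ${argline}. -->
<plugin>
    <groupId>org.jacoco</groupId>
    <artifactId>jacoco-maven-plugin</artifactId>
    <version>0.8.8</version>
    <executions>
        <execution>
            <id>prepare-agent</id>
            <goals>
                <goal>prepare-agent</goal>
            </goals>
            <configuration>
                <propertyName>jacocoArgLine</propertyName>
            </configuration>
        </execution>
    </executions>
</plugin>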